Base Configs

class nerfstudio.configs.base_config.InstantiateConfig(_target: Type)#

Bases: PrintableConfig

Config class for instantiating the class specified in the _target attribute.

setup(**kwargs) Any#

Returns the instantiated object using the config.

class nerfstudio.configs.base_config.LocalWriterConfig(_target: ~typing.Type = <class 'nerfstudio.utils.writer.LocalWriter'>, enable: bool = False, stats_to_track: ~typing.Tuple[~nerfstudio.utils.writer.EventName, ...] = (<EventName.ITER_TRAIN_TIME: 'Train Iter (time)'>, <EventName.TRAIN_RAYS_PER_SEC: 'Train Rays / Sec'>, <EventName.CURR_TEST_PSNR: 'Test PSNR'>, <EventName.VIS_RAYS_PER_SEC: 'Vis Rays / Sec'>, <EventName.TEST_RAYS_PER_SEC: 'Test Rays / Sec'>, <EventName.ETA: 'ETA (time)'>), max_log_size: int = 10)#

Bases: InstantiateConfig

Local Writer config

enable: bool = False#

if True enables local logging, else disables

max_log_size: int = 10#

maximum number of rows to print before wrapping. If 0, will print everything.

setup(banner_messages: Optional[List[str]] = None, **kwargs) Any#

Instantiate local writer


banner_messages – List of strings that always print at the bottom of screen.

stats_to_track: Tuple[EventName, ...] = (<EventName.ITER_TRAIN_TIME: 'Train Iter (time)'>, <EventName.TRAIN_RAYS_PER_SEC: 'Train Rays / Sec'>, <EventName.CURR_TEST_PSNR: 'Test PSNR'>, <EventName.VIS_RAYS_PER_SEC: 'Vis Rays / Sec'>, <EventName.TEST_RAYS_PER_SEC: 'Test Rays / Sec'>, <EventName.ETA: 'ETA (time)'>)#

specifies which stats will be logged/printed to terminal

class nerfstudio.configs.base_config.LoggingConfig(relative_log_dir: ~pathlib.Path = PosixPath('.'), steps_per_log: int = 10, max_buffer_size: int = 20, local_writer: ~nerfstudio.configs.base_config.LocalWriterConfig = LocalWriterConfig(_target=<class 'nerfstudio.utils.writer.LocalWriter'>, enable=True, stats_to_track=(<EventName.ITER_TRAIN_TIME: 'Train Iter (time)'>, <EventName.TRAIN_RAYS_PER_SEC: 'Train Rays / Sec'>, <EventName.CURR_TEST_PSNR: 'Test PSNR'>, <EventName.VIS_RAYS_PER_SEC: 'Vis Rays / Sec'>, <EventName.TEST_RAYS_PER_SEC: 'Test Rays / Sec'>, <EventName.ETA: 'ETA (time)'>), max_log_size=10), enable_profiler: bool = True)#

Bases: PrintableConfig

Configuration of loggers and profilers

enable_profiler: bool = True#

whether to enable profiling code. The profiler logs the run times of functions and prints the results at the end of training.

local_writer: LocalWriterConfig = LocalWriterConfig(_target=<class 'nerfstudio.utils.writer.LocalWriter'>, enable=True, stats_to_track=(<EventName.ITER_TRAIN_TIME: 'Train Iter (time)'>, <EventName.TRAIN_RAYS_PER_SEC: 'Train Rays / Sec'>, <EventName.CURR_TEST_PSNR: 'Test PSNR'>, <EventName.VIS_RAYS_PER_SEC: 'Vis Rays / Sec'>, <EventName.TEST_RAYS_PER_SEC: 'Test Rays / Sec'>, <EventName.ETA: 'ETA (time)'>), max_log_size=10)#

if provided, will print stats locally. if None, will disable printing

max_buffer_size: int = 20#

maximum history size to keep for computing running averages of stats. e.g. if 20, averages will be computed over past 20 occurrences.

relative_log_dir: Path = PosixPath('.')#

relative path to save all logged events

steps_per_log: int = 10#

number of steps between logging stats

class nerfstudio.configs.base_config.MachineConfig(seed: int = 42, num_gpus: int = 1, num_machines: int = 1, machine_rank: int = 0, dist_url: str = 'auto')#

Bases: PrintableConfig

Configuration of machine setup

dist_url: str = 'auto'#

distributed connection point (for DDP)

machine_rank: int = 0#

current machine’s rank (for DDP)

num_gpus: int = 1#

total number of gpus available for train/eval

num_machines: int = 1#

total number of distributed machines available (for DDP)

seed: int = 42#

random seed initialization

class nerfstudio.configs.base_config.PrintableConfig#

Bases: object

Printable Config defining str function

class nerfstudio.configs.base_config.ViewerConfig(relative_log_filename: str = 'viewer_log_filename.txt', start_train: bool = True, zmq_port: Optional[int] = None, launch_bridge_server: bool = True, websocket_port: Optional[int] = 7007, ip_address: str = '', num_rays_per_chunk: int = 32768, max_num_display_images: int = 512, quit_on_train_completion: bool = False, skip_openrelay: bool = False, codec: Literal['H264', 'VP8'] = 'VP8', local: bool = False)#

Bases: PrintableConfig

Configuration for viewer instantiation

codec: Literal['H264', 'VP8'] = 'VP8'#

Video codec that viewer will use.

ip_address: str = ''#

the ip address where the bridge server is running

launch_bridge_server: bool = True#

whether or not to launch the bridge server

local: bool = False#

If running local server instance, avoid using relays to communicate with the viewer.

max_num_display_images: int = 512#

Maximum number of training images to display in the viewer, to avoid lag. This does not change which images are actually used in training/evaluation. If -1, display all.

num_rays_per_chunk: int = 32768#

number of rays per chunk to render with viewer

quit_on_train_completion: bool = False#

Whether to kill the training job when it has completed. Note this will stop rendering in the viewer.

relative_log_filename: str = 'viewer_log_filename.txt'#

Filename to use for the log file.

skip_openrelay: bool = False#

Avoid using openrelay to communicate with the viewer. Try disabling if you have trouble connecting to the viewer

start_train: bool = True#

whether to immediately start training upon loading the viewer. If False, will just visualize the dataset, but you can toggle training in the viewer

websocket_port: Optional[int] = 7007#

the default websocket port to connect to

zmq_port: Optional[int] = None#

The zmq port to connect to for communication. If None, find an available port.