/include/mitsuba/render/scene.h

https://bitbucket.org/blckshrk/ift6042 · C Header · 1187 lines · 190 code · 90 blank · 907 comment · 3 complexity · 7a2299bd603a5d4d46a9c11825b04cce MD5 · raw file

  1. /*
  2. This file is part of Mitsuba, a physically based rendering system.
  3. Copyright (c) 2007-2012 by Wenzel Jakob and others.
  4. Mitsuba is free software; you can redistribute it and/or modify
  5. it under the terms of the GNU General Public License Version 3
  6. as published by the Free Software Foundation.
  7. Mitsuba is distributed in the hope that it will be useful,
  8. but WITHOUT ANY WARRANTY; without even the implied warranty of
  9. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  10. GNU General Public License for more details.
  11. You should have received a copy of the GNU General Public License
  12. along with this program. If not, see <http://www.gnu.org/licenses/>.
  13. */
  14. #pragma once
  15. #if !defined(__MITSUBA_RENDER_SCENE_H_)
  16. #define __MITSUBA_RENDER_SCENE_H_
  17. #include <mitsuba/core/netobject.h>
  18. #include <mitsuba/core/pmf.h>
  19. #include <mitsuba/core/aabb.h>
  20. #include <mitsuba/render/trimesh.h>
  21. #include <mitsuba/render/skdtree.h>
  22. #include <mitsuba/render/sensor.h>
  23. #include <mitsuba/render/integrator.h>
  24. #include <mitsuba/render/bsdf.h>
  25. #include <mitsuba/render/subsurface.h>
  26. #include <mitsuba/render/texture.h>
  27. #include <mitsuba/render/medium.h>
  28. #include <mitsuba/render/volume.h>
  29. #include <mitsuba/render/phase.h>
  30. MTS_NAMESPACE_BEGIN
  31. /**
  32. * \brief Principal scene data structure
  33. *
  34. * This class holds information on surfaces, emitters and participating media
  35. * and coordinates rendering jobs. It also provides useful query routines that
  36. * are mostly used by the \ref Integrator implementations.
  37. *
  38. * \ingroup librender
  39. * \ingroup libpython
  40. */
  41. class MTS_EXPORT_RENDER Scene : public NetworkedObject {
  42. public:
  43. // =============================================================
  44. //! @{ \name Initialization and rendering
  45. // =============================================================
  46. /// Construct a new, empty scene (with the default properties)
  47. Scene();
  48. /// Construct a new, empty scene
  49. Scene(const Properties &props);
  50. /// Create a shallow clone of a scene
  51. Scene(Scene *scene);
  52. /// Unserialize a scene from a binary data stream
  53. Scene(Stream *stream, InstanceManager *manager);
  54. /**
  55. * \brief Initialize the scene
  56. *
  57. * This function \a must be called before using any
  58. * of the methods in this class.
  59. */
  60. void initialize();
  61. /**
  62. * \brief Initialize the scene for bidirectional rendering algorithms.
  63. *
  64. * This ensures that certain "special" shapes (such as the aperture
  65. * of the sensor) are added to the scene. This function should be called
  66. * before using any of the methods in this class.
  67. */
  68. void initializeBidirectional();
  69. /**
  70. * \brief Perform any pre-processing steps before rendering
  71. *
  72. * This function should be called after \ref initialize() and
  73. * before rendering the scene. It might do a variety of things,
  74. * such as constructing photon maps or executing distributed overture
  75. * passes.
  76. *
  77. * Progress is tracked by sending status messages to a provided
  78. * render queue (the parameter \c job is required to discern multiple
  79. * render jobs occurring in parallel).
  80. *
  81. * The last three parameters are resource IDs of the associated scene,
  82. * sensor and sample generator, which have been made available to all
  83. * local and remote workers.
  84. *
  85. * \return \c true upon successful completion.
  86. */
  87. bool preprocess(RenderQueue *queue, const RenderJob *job,
  88. int sceneResID, int sensorResID, int samplerResID);
  89. /**
  90. * \brief Render the scene as seen by the scene's main sensor.
  91. *
  92. * Progress is tracked by sending status messages to a provided
  93. * render queue (the parameter \c job is required to discern multiple
  94. * render jobs occurring in parallel).
  95. *
  96. * The last three parameters are resource IDs of the associated scene,
  97. * sensor and sample generator, which have been made available to all
  98. * local and remote workers.
  99. *
  100. * \return \c true upon successful completion.
  101. */
  102. bool render(RenderQueue *queue, const RenderJob *job,
  103. int sceneResID, int sensorResID, int samplerResID);
  104. /**
  105. * \brief Perform any post-processing steps after rendering
  106. *
  107. * Progress is tracked by sending status messages to a provided
  108. * render queue (the parameter \c job is required to discern multiple
  109. * render jobs occurring in parallel).
  110. *
  111. * The last three parameters are resource IDs of the associated scene,
  112. * sensor and sample generator, which have been made available to all
  113. * local and remote workers.
  114. */
  115. void postprocess(RenderQueue *queue, const RenderJob *job,
  116. int sceneResID, int sensorResID, int samplerResID);
  117. /// Write out the current (partially rendered) image
  118. void flush();
  119. /**
  120. * \brief Cancel a running rendering job
  121. *
  122. * This function can be called asynchronously, e.g. from a GUI.
  123. * In this case, \ref render() will quit with a return value of
  124. * \c false.
  125. */
  126. void cancel();
  127. /// Add a child node to the scene
  128. void addChild(const std::string &name, ConfigurableObject *child);
  129. /// Add an unnamed child
  130. inline void addChild(ConfigurableObject *child) { addChild("", child); }
  131. /** \brief Configure this object (called \a once after construction
  132. and addition of all child \ref ConfigurableObject instances).) */
  133. void configure();
  134. //! @}
  135. // =============================================================
  136. // =============================================================
  137. //! @{ \name Ray tracing
  138. // =============================================================
  139. /**
  140. * \brief Intersect a ray against all primitives stored in the scene
  141. * and return detailed intersection information
  142. *
  143. * \param ray
  144. * A 3-dimensional ray data structure with minimum/maximum
  145. * extent information, as well as a time value (which applies
  146. * when the shapes are in motion)
  147. *
  148. * \param its
  149. * A detailed intersection record, which will be filled by the
  150. * intersection query
  151. *
  152. * \return \c true if an intersection was found
  153. */
  154. inline bool rayIntersect(const Ray &ray, Intersection &its) const {
  155. return m_kdtree->rayIntersect(ray, its);
  156. }
  157. /**
  158. * \brief Intersect a ray against all primitives stored in the scene
  159. * and return the traveled distance and intersected shape
  160. *
  161. * This function represents a performance improvement when the
  162. * intersected shape must be known, but there is no need for
  163. * a detailed intersection record.
  164. *
  165. * \param ray
  166. * A 3-dimensional ray data structure with minimum/maximum
  167. * extent information, as well as a time value (which applies
  168. * when the shapes are in motion)
  169. *
  170. * \param t
  171. * The traveled ray distance will be stored in this parameter
  172. * \param shape
  173. * A pointer to the intersected shape will be stored in this
  174. * parameter
  175. *
  176. * \param n
  177. * The geometric surface normal will be stored in this parameter
  178. *
  179. * \param uv
  180. * The UV coordinates associated with the intersection will
  181. * be stored here.
  182. *
  183. * \return \c true if an intersection was found
  184. */
  185. inline bool rayIntersect(const Ray &ray, Float &t,
  186. ConstShapePtr &shape, Normal &n, Point2 &uv) const {
  187. return m_kdtree->rayIntersect(ray, t, shape, n, uv);
  188. }
  189. /**
  190. * \brief Intersect a ray against all primitives stored in the scene
  191. * and \a only determine whether or not there is an intersection.
  192. *
  193. * This is by far the fastest ray tracing method. This performance
  194. * improvement comes with a major limitation though: this function
  195. * cannot provide any additional information about the detected
  196. * intersection (not even its position).
  197. *
  198. * \param ray
  199. * A 3-dimensional ray data structure with minimum/maximum
  200. * extent information, as well as a time value (which applies
  201. * when the shapes are in motion)
  202. *
  203. * \return \c true if an intersection was found
  204. */
  205. inline bool rayIntersect(const Ray &ray) const {
  206. return m_kdtree->rayIntersect(ray);
  207. }
  208. /**
  209. * \brief Return the transmittance between \c p1 and \c p2 at the
  210. * specified time.
  211. *
  212. * This function is essentially a continuous version of \ref isOccluded(),
  213. * which additionally accounts for the presence of participating media
  214. * and surface interactions that attenuate a ray without changing
  215. * its direction (i.e. geometry with an alpha mask)
  216. *
  217. * The implementation correctly handles arbitrary amounts of index-matched
  218. * medium transitions. The \c interactions parameter can be used to
  219. * specify a maximum number of possible surface interactions and medium
  220. * transitions between \c p1 and \c p2. When this number is exceeded,
  221. * the function returns zero.
  222. *
  223. * Note that index-mismatched boundaries (i.e. a transition from air to
  224. * water) are not supported by this function. The integrator needs to take
  225. * care of these in some other way.
  226. *
  227. * \param p1
  228. * Source position
  229. * \param p2
  230. * Target position
  231. * \param p1OnSurface
  232. * Is the source position located on a surface? This information is
  233. * necessary to set up the right ray epsilons for the kd-tree traversal
  234. * \param p2OnSurface
  235. * Is the target position located on a surface?
  236. * \param medium
  237. * The medium at \c p1
  238. * \param interactions
  239. * Specifies the maximum permissible number of index-matched medium
  240. * transitions or \ref BSDF::ENull scattering events on the way
  241. * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
  242. * When the function returns a nonzero result, this parameter will
  243. * additionally be used to return the actual number of intermediate
  244. * interactions.
  245. * \param time
  246. * Associated scene time value for the transmittance computation
  247. * \param sampler
  248. * Optional: A sample generator. This may be used
  249. * to compute a random unbiased estimate of the transmission.
  250. * \return A spectral-valued transmittance value with components
  251. * between zero and one.
  252. */
  253. Spectrum evalTransmittance(const Point &p1, bool p1OnSurface,
  254. const Point &p2, bool p2OnSurface, Float time, const Medium *medium,
  255. int &interactions, Sampler *sampler = NULL) const;
  256. //! @}
  257. // =============================================================
  258. // =============================================================
  259. //! @{ \name Ray tracing support for bidirectional algorithms
  260. // =============================================================
  261. /**
  262. * \brief Intersect a ray against all scene primitives \a and
  263. * "special" primitives, such as the aperture of a sensor.
  264. *
  265. * This function does exactly the same thing as \ref rayIntersect,
  266. * except that it additionally performs intersections against a
  267. * list of "special" shapes that are intentionally kept outside
  268. * of the main scene kd-tree (e.g. because they are not static
  269. * and might change from rendering to rendering). This is needed
  270. * by some bidirectional techniques that e.g. care about
  271. * intersections with the sensor aperture.
  272. *
  273. * \param ray
  274. * A 3-dimensional ray data structure with minimum/maximum
  275. * extent information, as well as a time value (which applies
  276. * when the shapes are in motion)
  277. *
  278. * \param its
  279. * A detailed intersection record, which will be filled by the
  280. * intersection query
  281. *
  282. * \return \c true if an intersection was found
  283. */
  284. bool rayIntersectAll(const Ray &ray, Intersection &its) const;
  285. /**
  286. * \brief Intersect a ray against all normal and "special" primitives
  287. * and only return the traveled distance and intersected shape
  288. *
  289. * This function represents a performance improvement when the
  290. * intersected shape must be known, but there is no need for
  291. * a detailed intersection record.
  292. *
  293. * This function does exactly the same thing as \ref rayIntersect,
  294. * except that it additionally performs intersections against a
  295. * list of "special" shapes that are intentionally kept outside
  296. * of the main scene kd-tree (e.g. because they are not static
  297. * and might change from rendering to rendering). This is needed
  298. * by some bidirectional techniques that e.g. care about
  299. * intersections with the sensor aperture.
  300. *
  301. * \param ray
  302. * A 3-dimensional ray data structure with minimum/maximum
  303. * extent information, as well as a time value (which applies
  304. * when the shapes are in motion)
  305. *
  306. * \param t
  307. * The traveled ray distance will be stored in this parameter
  308. * \param shape
  309. * A pointer to the intersected shape will be stored in this
  310. * parameter
  311. *
  312. * \param n
  313. * The geometric surface normal will be stored in this parameter
  314. *
  315. * \param uv
  316. * The UV coordinates associated with the intersection will
  317. * be stored here.
  318. *
  319. * \return \c true if an intersection was found
  320. */
  321. bool rayIntersectAll(const Ray &ray, Float &t,
  322. ConstShapePtr &shape, Normal &n, Point2 &uv) const;
  323. /**
  324. * \brief Intersect a ray against all normal and "special" primitives
  325. * and \a only determine whether or not there is an intersection.
  326. *
  327. * This is by far the fastest ray tracing method. This performance
  328. * improvement comes with a major limitation though: this function
  329. * cannot provide any additional information about the detected
  330. * intersection (not even its position).
  331. *
  332. * This function does exactly the same thing as \ref rayIntersect,
  333. * except that it additionally performs intersections against a
  334. * list of "special" shapes that are intentionally kept outside
  335. * of the main scene kd-tree (e.g. because they are not static
  336. * and might change from rendering to rendering). This is needed
  337. * by some bidirectional techniques that e.g. care about
  338. * intersections with the sensor aperture.
  339. *
  340. * \param ray
  341. * A 3-dimensional ray data structure with minimum/maximum
  342. * extent information, as well as a time value (which applies
  343. * when the shapes are in motion)
  344. *
  345. * \return \c true if an intersection was found
  346. */
  347. bool rayIntersectAll(const Ray &ray) const;
  348. /**
  349. * \brief Return the transmittance between \c p1 and \c p2 at the
  350. * specified time (and account for "special" primitives).
  351. *
  352. * This function is essentially a continuous version of \ref isOccluded(),
  353. * which additionally accounts for the presence of participating media
  354. * and surface interactions that attenuate a ray without changing
  355. * its direction (i.e. geometry with an alpha mask)
  356. *
  357. * The implementation correctly handles arbitrary amounts of index-matched
  358. * medium transitions. The \c interactions parameter can be used to
  359. * specify a maximum number of possible surface interactions and medium
  360. * transitions between \c p1 and \c p2. When this number is exceeded,
  361. * the function returns zero.
  362. *
  363. * Note that index-mismatched boundaries (i.e. a transition from air to
  364. * water) are not supported by this function. The integrator needs to take
  365. * care of these in some other way.
  366. *
  367. * This function does exactly the same thing as \ref evalTransmittance,
  368. * except that it additionally performs intersections against a
  369. * list of "special" shapes that are intentionally kept outside
  370. * of the main scene kd-tree (e.g. because they are not static
  371. * and might change from rendering to rendering). This is needed
  372. * by some bidirectional techniques that care about intersections
  373. * with the sensor aperture, etc.
  374. *
  375. * \param p1
  376. * Source position
  377. * \param p2
  378. * Target position
  379. * \param p1OnSurface
  380. * Is the source position located on a surface? This information is
  381. * necessary to set up the right ray epsilons for the kd-tree traversal
  382. * \param p2OnSurface
  383. * Is the target position located on a surface?
  384. * \param medium
  385. * The medium at \c p1
  386. * \param interactions
  387. * Specifies the maximum permissible number of index-matched medium
  388. * transitions or \ref BSDF::ENull scattering events on the way
  389. * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
  390. * When the function returns a nonzero result, this parameter will
  391. * additionally be used to return the actual number of intermediate
  392. * interactions.
  393. * \param time
  394. * Associated scene time value for the transmittance computation
  395. * \param sampler
  396. * Optional: A sample generator. This may be used
  397. * to compute a random unbiased estimate of the transmission.
  398. * \return A spectral-valued transmittance value with components
  399. * between zero and one.
  400. */
  401. Spectrum evalTransmittanceAll(const Point &p1, bool p1OnSurface,
  402. const Point &p2, bool p2OnSurface, Float time, const Medium *medium,
  403. int &interactions, Sampler *sampler = NULL) const;
  404. //! @}
  405. // =============================================================
  406. // =============================================================
  407. //! @{ \name Direct sampling techniques
  408. // =============================================================
  409. /**
  410. * \brief Direct illumination sampling routine
  411. *
  412. * Given an arbitrary reference point in the scene, this method samples a
  413. * position on an emitter that has a nonzero contribution towards that point.
  414. *
  415. * Ideally, the implementation should importance sample the product of
  416. * the emission profile and the geometry term between the reference point
  417. * and the position on the emitter.
  418. *
  419. * \param dRec
  420. * A direct illumination sampling record that specifies the
  421. * reference point and a time value. After the function terminates,
  422. * it will be populated with the position sample and related information
  423. *
  424. * \param sample
  425. * A uniformly distributed 2D vector
  426. *
  427. * \param testVisibility
  428. * When set to \c true, a shadow ray will be cast to ensure that the
  429. * sampled emitter position and the reference point are mutually visible.
  430. *
  431. * \return
  432. * An importance weight given by the radiance received along
  433. * the sampled ray divided by the sample probability.
  434. */
  435. Spectrum sampleEmitterDirect(DirectSamplingRecord &dRec,
  436. const Point2 &sample, bool testVisibility = true) const;
  437. /**
  438. * \brief "Direct illumination" sampling routine for the main scene sensor
  439. *
  440. * Given an arbitrary reference point in the scene, this method samples a
  441. * position on a sensor that has a nonzero contribution towards that point.
  442. * This function can be interpreted as a generalization of a direct
  443. * illumination sampling strategy to sensors.
  444. *
  445. * Ideally, the implementation should importance sample the product of
  446. * the response profile and the geometry term between the reference point
  447. * and the position on the emitter.
  448. *
  449. * \param dRec
  450. * A direct illumination sampling record that specifies the
  451. * reference point and a time value. After the function terminates,
  452. * it will be populated with the position sample and related information
  453. *
  454. * \param sample
  455. * A uniformly distributed 2D vector
  456. *
  457. * \param testVisibility
  458. * When set to \c true, a shadow ray will be cast to ensure that the
  459. * sampled sensor position and the reference point are mutually visible.
  460. *
  461. * \return
  462. * An importance weight given by the importance emitted along
  463. * the sampled ray divided by the sample probability.
  464. */
  465. Spectrum sampleSensorDirect(DirectSamplingRecord &dRec,
  466. const Point2 &sample, bool testVisibility = true) const;
  467. /**
  468. * \brief Direct illumination sampling with support for participating
  469. * media (medium variant)
  470. *
  471. * Given an arbitrary reference point in the scene, this method samples a
  472. * position on an emitter that has a nonzero contribution towards that point.
  473. * In comparison to \ref sampleEmitterDirect, this version also accounts for
  474. * attenuation by participating media and should be used when \c dRec.p
  475. * lies \a inside a medium, i.e. \a not on a surface!
  476. *
  477. * Ideally, the implementation should importance sample the product of
  478. * the emission profile and the geometry term between the reference point
  479. * and the position on the emitter.
  480. *
  481. * \param dRec
  482. * A direct illumination sampling record that specifies the
  483. * reference point and a time value. After the function terminates,
  484. * it will be populated with the position sample and related information
  485. *
  486. * \param medium
  487. * The medium located at the reference point (or \c NULL for vacuum).
  488. *
  489. * \param interactions
  490. * Specifies the maximum permissible number of index-matched medium
  491. * transitions or \ref BSDF::ENull scattering events on the way
  492. * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
  493. * When the function returns a nonzero result, this parameter will
  494. * additionally be used to return the actual number of intermediate
  495. * interactions.
  496. *
  497. * \param sample
  498. * A uniformly distributed 2D vector
  499. *
  500. * \param sampler
  501. * Optional: a pointer to a sample generator. Some particular
  502. * implementations can do a better job at sampling when they have
  503. * access to additional random numbers.
  504. *
  505. * \return
  506. * An importance weight given by the radiance received along
  507. * the sampled ray divided by the sample probability.
  508. */
  509. Spectrum sampleAttenuatedEmitterDirect(DirectSamplingRecord &dRec,
  510. const Medium *medium, int &interactions, const Point2 &sample,
  511. Sampler *sampler = NULL) const;
  512. /**
  513. * \brief "Direct illumination" sampling routine for the main scene sensor
  514. * with support for participating media (medium variant)
  515. *
  516. * Given an arbitrary reference point in the scene, this method samples a
  517. * position on a sensor that has a nonzero response towards that point.
  518. * In comparison to \ref sampleSensorDirect, this version also accounts for
  519. * attenuation by participating media and should be used when \c dRec.p
  520. * lies \a inside a medium, i.e. \a not on a surface!
  521. * This function can be interpreted as a generalization of a direct
  522. * illumination sampling strategy to sensors.
  523. *
  524. * Ideally, the implementation should importance sample the product of
  525. * the response profile and the geometry term between the reference point
  526. * and the position on the sensor.
  527. *
  528. * \param dRec
  529. * A direct illumination sampling record that specifies the
  530. * reference point and a time value. After the function terminates,
  531. * it will be populated with the position sample and related information
  532. *
  533. * \param medium
  534. * The medium located at the reference point (or \c NULL for vacuum).
  535. *
  536. * \param interactions
  537. * Specifies the maximum permissible number of index-matched medium
  538. * transitions or \ref BSDF::ENull scattering events on the way
  539. * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
  540. * When the function returns a nonzero result, this parameter will
  541. * additionally be used to return the actual number of intermediate
  542. * interactions.
  543. *
  544. * \param sample
  545. * A uniformly distributed 2D vector
  546. *
  547. * \param sampler
  548. * Optional: a pointer to a sample generator. Some particular
  549. * implementations can do a better job at sampling when they have
  550. * access to additional random numbers.
  551. *
  552. * \return
  553. * An importance weight given by the radiance received along
  554. * the sampled ray divided by the sample probability.
  555. */
  556. Spectrum sampleAttenuatedSensorDirect(DirectSamplingRecord &dRec,
  557. const Medium *medium, int &interactions, const Point2 &sample,
  558. Sampler *sampler = NULL) const;
  559. /**
  560. * \brief Direct illumination sampling with support for participating
  561. * media (surface variant)
  562. *
  563. * Given an arbitrary reference point in the scene, this method samples a
  564. * position on an emitter that has a nonzero contribution towards that point.
  565. * In comparison to \ref sampleEmitterDirect, this version also accounts for
  566. * attenuation by participating media and should be used when the target
  567. * position lies on a surface.
  568. *
  569. * Ideally, the implementation should importance sample the product of
  570. * the emission profile and the geometry term between the reference point
  571. * and the position on the emitter.
  572. *
  573. * \param dRec
  574. * A direct illumination sampling record that specifies the
  575. * reference point and a time value. After the function terminates,
  576. * it will be populated with the position sample and related information
  577. *
  578. * \param its
  579. * An intersection record associated with the reference point in
  580. * \c dRec. This record is needed to determine the participating
  581. * medium between the emitter sample and the reference point
  582. * when \c its marks a medium transition.
  583. *
  584. * \param medium
  585. * The medium located at \c its (or \c NULL for vacuum). When the shape
  586. * associated with \c its marks a medium transition, it does not matter
  587. * which of the two media is specified.
  588. *
  589. * \param interactions
  590. * Specifies the maximum permissible number of index-matched medium
  591. * transitions or \ref BSDF::ENull scattering events on the way
  592. * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
  593. * When the function returns a nonzero result, this parameter will
  594. * additionally be used to return the actual number of intermediate
  595. * interactions.
  596. *
  597. * \param sample
  598. * A uniformly distributed 2D vector
  599. *
  600. * \param sampler
  601. * Optional: a pointer to a sample generator. Some particular
  602. * implementations can do a better job at sampling when they have
  603. * access to additional random numbers.
  604. *
  605. * \return
  606. * An importance weight given by the radiance received along
  607. * the sampled ray divided by the sample probability.
  608. */
  609. Spectrum sampleAttenuatedEmitterDirect(DirectSamplingRecord &dRec,
  610. const Intersection &its, const Medium *medium, int &interactions,
  611. const Point2 &sample, Sampler *sampler = NULL) const;
  612. /**
  613. * \brief "Direct illumination" sampling routine for the main scene sensor
  614. * with support for participating media (surface variant)
  615. *
  616. * Given an arbitrary reference point in the scene, this method samples a
  617. * position on a sensor that has a nonzero response towards that point.
  618. * In comparison to \ref sampleSensorDirect, this version also accounts for
  619. * attenuation by participating media and should be used when the target
  620. * position lies on a surface.
  621. *
  622. * Ideally, the implementation should importance sample the product of
  623. * the emission profile and the geometry term between the reference point
  624. * and the position on the sensor.
  625. *
  626. * \param dRec
  627. * A direct illumination sampling record that specifies the
  628. * reference point and a time value. After the function terminates,
  629. * it will be populated with the position sample and related information
  630. *
  631. * \param its
  632. * An intersection record associated with the reference point in
  633. * \c dRec. This record is needed to determine the participating
  634. * medium between the sensor sample and the reference point
  635. * when \c its marks a medium transition.
  636. *
  637. * \param medium
  638. * The medium located at \c its (or \c NULL for vacuum). When the shape
  639. * associated with \c its marks a medium transition, it does not matter
  640. * which of the two media is specified.
  641. *
  642. * \param interactions
  643. * Specifies the maximum permissible number of index-matched medium
  644. * transitions or \ref BSDF::ENull scattering events on the way
  645. * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
  646. * When the function returns a nonzero result, this parameter will
  647. * additionally be used to return the actual number of intermediate
  648. * interactions.
  649. *
  650. * \param sample
  651. * A uniformly distributed 2D vector
  652. *
  653. * \param sampler
  654. * Optional: a pointer to a sample generator. Some particular
  655. * implementations can do a better job at sampling when they have
  656. * access to additional random numbers.
  657. *
  658. * \return
  659. * An importance weight given by the radiance received along
  660. * the sampled ray divided by the sample probability.
  661. */
  662. Spectrum sampleAttenuatedSensorDirect(DirectSamplingRecord &dRec,
  663. const Intersection &its, const Medium *medium, int &interactions,
  664. const Point2 &sample, Sampler *sampler = NULL) const;
  665. /**
  666. * \brief Evaluate the probability density of the \a direct sampling
  667. * method implemented by the \ref sampleEmitterDirect() method.
  668. *
  669. * \param dRec
  670. * A direct sampling record, which specifies the query
  671. * location. Note that this record need not be completely
  672. * filled out. The important fields are \c p, \c n, \c ref,
  673. * \c dist, \c d, \c measure, and \c uv.
  674. *
  675. * \param p
  676. * The world-space position that would have been passed to \ref
  677. * sampleEmitterDirect()
  678. *
  679. * \return
  680. * The density expressed with respect to the requested measure
  681. * (usually \ref ESolidAngle)
  682. */
  683. Float pdfEmitterDirect(const DirectSamplingRecord &dRec) const;
	/**
	 * \brief Evaluate the probability density of the \a direct sampling
	 * method implemented by the \ref sampleSensorDirect() method.
	 *
	 * \param dRec
	 *    A direct sampling record, which specifies the query
	 *    location. Note that this record need not be completely
	 *    filled out. The important fields are \c p, \c n, \c ref,
	 *    \c dist, \c d, \c measure, and \c uv.
	 *
	 *    The \c ref field holds the world-space position that was
	 *    (or would have been) passed to \ref sampleSensorDirect().
	 *
	 * \return
	 *    The density expressed with respect to the requested measure
	 *    (usually \ref ESolidAngle)
	 */
	Float pdfSensorDirect(const DirectSamplingRecord &dRec) const;
  703. //! @}
  704. // =============================================================
  705. // =============================================================
  706. //! @{ \name Emission sampling techniques
  707. // =============================================================
	/**
	 * \brief Sample a position according to the emission profile
	 * defined by the emitters in the scene.
	 *
	 * To sample the directional component, please use the
	 * \ref Emitter::sampleDirection() method.
	 *
	 * \param pRec
	 *    A position record to be populated with the sampled
	 *    position and related information
	 *
	 * \param sample
	 *    A uniformly distributed 2D vector
	 *
	 * \return
	 *    An importance weight associated with the sampled position.
	 *    This accounts for the difference in the spatial part of the
	 *    emission profile and the density function.
	 *
	 * \sa pdfEmitterPosition()
	 */
	Spectrum sampleEmitterPosition(PositionSamplingRecord &pRec,
		const Point2 &sample) const;
  729. /**
  730. * \brief Sample a position on the main sensor of the scene.
  731. *
  732. * This function is provided here mainly for symmetry
  733. * with respect to \ref sampleEmitterPosition().
  734. *
  735. * To sample the directional component, please use the
  736. * \ref Sensor::sampleDirection() method.
  737. *
  738. * \param pRec
  739. * A position record to be populated with the sampled
  740. * position and related information
  741. *
  742. * \param sample
  743. * A uniformly distributed 2D vector
  744. *
  745. * \param extra
  746. * An additional 2D vector provided to the sampling
  747. * routine -- its use is implementation-dependent.
  748. *
  749. * \return
  750. * An importance weight associated with the sampled position.
  751. * This accounts for the difference in the spatial part of the
  752. * response profile and the density function.
  753. */
  754. inline Spectrum sampleSensorPosition(PositionSamplingRecord &pRec,
  755. const Point2 &sample, const Point2 *extra = NULL) const {
  756. pRec.object = m_sensor.get();
  757. return m_sensor->samplePosition(pRec, sample, extra);
  758. }
	/**
	 * \brief Evaluate the spatial component of the sampling density
	 * implemented by the \ref sampleEmitterPosition() method
	 *
	 * \param pRec
	 *    A position sampling record, which specifies the query location
	 *
	 * \return
	 *    The area density at the supplied position
	 *
	 * \sa sampleEmitterPosition()
	 */
	Float pdfEmitterPosition(const PositionSamplingRecord &pRec) const;
  770. /**
  771. * \brief Evaluate the spatial component of the sampling density
  772. * implemented by the \ref sampleSensorPosition() method
  773. *
  774. * \param pRec
  775. * A position sampling record, which specifies the query location
  776. *
  777. * \return
  778. * The area density at the supplied position
  779. */
  780. inline Float pdfSensorPosition(const PositionSamplingRecord &pRec) const {
  781. return m_sensor->pdfPosition(pRec);
  782. }
  783. /**
  784. * \brief Return the discrete probability of choosing a
  785. * certain emitter in <tt>sampleEmitter*</tt>
  786. */
  787. inline Float pdfEmitterDiscrete(const Emitter *emitter) const {
  788. return emitter->getSamplingWeight() * m_emitterPDF.getNormalization();
  789. }
	/**
	 * \brief Importance sample a ray according to the emission profile
	 * defined by the emitters in the scene
	 *
	 * This function combines both steps of choosing a ray origin and
	 * direction value. It does not return any auxiliary sampling
	 * information and is mainly meant to be used by unidirectional
	 * rendering techniques.
	 *
	 * Note that this function potentially uses a different sampling
	 * strategy compared to the sequence of running \ref sampleEmitterPosition()
	 * and \ref Emitter::sampleDirection(). The reason for this is that it may
	 * be possible to switch to a better technique when sampling both
	 * position and direction at the same time.
	 *
	 * \param ray
	 *    A ray data structure to be populated with a position
	 *    and direction value
	 *
	 * \param emitter
	 *    Used to return the emitter that was chosen by the
	 *    sampling procedure
	 *
	 * \param spatialSample
	 *    Denotes the sample that is used to choose the spatial component
	 *
	 * \param directionalSample
	 *    Denotes the sample that is used to choose the directional component
	 *
	 * \param time
	 *    Scene time value to be associated with the sample
	 *
	 * \return
	 *    An importance weight associated with the sampled ray.
	 *    This accounts for the difference between the emission profile
	 *    and the sampling density function.
	 */
	Spectrum sampleEmitterRay(Ray &ray,
		const Emitter* &emitter,
		const Point2 &spatialSample,
		const Point2 &directionalSample,
		Float time) const;
  847. //! @}
  848. // =============================================================
  849. // =============================================================
  850. //! @{ \name Environment emitters
  851. // =============================================================
	/// Return the scene's environment emitter (if there is one)
	inline const Emitter *getEnvironmentEmitter() const { return m_environmentEmitter.get(); }

	/// Does the scene have an environment emitter?
	inline bool hasEnvironmentEmitter() const { return m_environmentEmitter.get() != NULL; }
  856. /**
  857. * \brief Return the environment radiance for a ray that did not intersect
  858. * any of the scene objects.
  859. *
  860. * This is primarily meant for path tracing-style integrators.
  861. */
  862. inline Spectrum evalEnvironment(const RayDifferential &ray) const {
  863. return hasEnvironmentEmitter() ?
  864. m_environmentEmitter->evalEnvironment(ray) : Spectrum(0.0f);
  865. }
  866. /**
  867. * \brief Return the environment radiance for a ray that did not intersect
  868. * any of the scene objects. This method additionally considers
  869. * transmittance by participating media
  870. *
  871. * This is primarily meant for path tracing-style integrators.
  872. */
  873. inline Spectrum evalAttenuatedEnvironment(const RayDifferential &ray,
  874. const Medium *medium, Sampler *sampler) const {
  875. if (!m_environmentEmitter)
  876. return Spectrum(0.0f);
  877. Spectrum result = evalEnvironment(ray);
  878. if (medium)
  879. result *= medium->evalTransmittance(ray, sampler);
  880. return result;
  881. }
  882. //! @}
  883. // =============================================================
  884. // =============================================================
  885. //! @{ \name Miscellaneous
  886. // =============================================================
	/// Return an axis-aligned bounding box containing the whole scene
	/// \sa getBSphere()
	inline const AABB &getAABB() const {
		return m_aabb;
	}
	/**
	 * \brief Is the main scene sensor degenerate? (i.e. has it
	 * collapsed to a point or line)
	 *
	 * Note that this function only cares about the spatial component
	 * of the sensor -- its value does not depend on whether the directional
	 * response function is degenerate.
	 */
	inline bool hasDegenerateSensor() const { return m_degenerateSensor; }

	/**
	 * \brief Are \a all emitters in this scene degenerate?
	 * (i.e. have they collapsed to a point or line)
	 *
	 * Note that this function only cares about the spatial component
	 * of the emitters -- its value does not depend on whether the
	 * directional emission profile is degenerate.
	 */
	inline bool hasDegenerateEmitters() const { return m_degenerateEmitters; }
	/// Return a bounding sphere containing the whole scene
	inline BSphere getBSphere() const {
		// TODO: switch to something smarter at some point
		// (currently just the bounding sphere of the scene AABB)
		return m_aabb.getBSphere();
	}

	/// Does the scene contain participating media?
	inline bool hasMedia() const { return !m_media.empty(); }
	/**
	 * \brief Set the main scene sensor.
	 *
	 * Note that the main sensor is not included when this Scene instance
	 * is serialized -- the sensor field will be \c NULL after
	 * unserialization. This is intentional so that the sensor can
	 * be changed without having to re-transmit the whole scene.
	 * Hence, it needs to be submitted separately and re-attached
	 * on the remote side using \ref setSensor().
	 **/
	void setSensor(Sensor *sensor);

	/// \brief Remove a sensor from the scene's sensor list
	void removeSensor(Sensor *sensor);

	/// \brief Add a sensor to the scene's sensor list
	void addSensor(Sensor *sensor);

	/// Return the scene's main sensor
	inline Sensor *getSensor() { return m_sensor; }

	/// Return the scene's main sensor (const version)
	inline const Sensor *getSensor() const { return m_sensor.get(); }
	/**
	 * \brief Return the list of sensors that are specified
	 * by the scene.
	 *
	 * A scene can have multiple sensors -- however, during
	 * a rendering, there will always be one "main" sensor that
	 * is currently active.
	 *
	 * \sa getSensor
	 */
	inline ref_vector<Sensor> &getSensors() { return m_sensors; }

	/**
	 * \brief Return the list of sensors that are specified
	 * by the scene (const version)
	 *
	 * A scene can have multiple sensors -- however, during
	 * a rendering, there will always be one "main" sensor that
	 * is currently active.
	 *
	 * \sa getSensor
	 */
	inline const ref_vector<Sensor> &getSensors() const { return m_sensors; }
	/**
	 * \brief Set the scene's integrator.
	 *
	 * Note that the integrator is not included when this Scene instance
	 * is serialized -- the integrator field will be \c NULL after
	 * unserialization. This is intentional so that the integrator can
	 * be changed without having to re-transmit the whole scene. Hence,
	 * the integrator needs to be submitted separately and re-attached
	 * on the remote side using \ref setIntegrator().
	 **/
	inline void setIntegrator(Integrator *integrator) { m_integrator = integrator; }

	/// Return the scene's integrator
	inline Integrator *getIntegrator() { return m_integrator; }

	/// Return the scene's integrator (const version)
	inline const Integrator *getIntegrator() const { return m_integrator.get(); }
	/**
	 * \brief Set the scene's sampler.
	 *
	 * Note that the sampler is not included when this Scene instance
	 * is serialized -- the sampler field will be \c NULL after
	 * unserialization. This is intentional so that the sampler can
	 * be changed without having to re-transmit the whole scene.
	 * Hence, the sampler needs to be submitted separately
	 * and re-attached on the remote side using \ref setSampler().
	 **/
	inline void setSampler(Sampler *sampler) { m_sampler = sampler; }

	/**
	 * \brief Return the scene's sampler.
	 *
	 * Note that when rendering using multiple different threads, each
	 * thread will be passed a shallow copy of the scene, which has a
	 * different sampler instance. This helps to avoid locking/contention
	 * issues and ensures that different threads render with different
	 * random number sequences. The sampler instance provided here is a
	 * clone of the original sampler specified in the sensor.
	 */
	inline Sampler *getSampler() { return m_sampler; }

	/// Return the scene's sampler (const version)
	inline const Sampler *getSampler() const { return m_sampler.get(); }
	/// Return the scene's film (i.e. the film of the main sensor)
	inline Film *getFilm() { return m_sensor->getFilm(); }

	/// Return the scene's film (const version)
	inline const Film *getFilm() const { return m_sensor->getFilm(); }

	/// Return the scene's kd-tree accelerator
	inline ShapeKDTree *getKDTree() { return m_kdtree; }

	/// Return the scene's kd-tree accelerator (const version)
	inline const ShapeKDTree *getKDTree() const { return m_kdtree.get(); }
	/// Return the list of all subsurface integrators
	inline ref_vector<Subsurface> &getSubsurfaceIntegrators() { return m_ssIntegrators; }

	/// Return the list of all subsurface integrators (const version)
	inline const ref_vector<Subsurface> &getSubsurfaceIntegrators() const { return m_ssIntegrators; }

	/// Return the scene's triangular meshes (a subset of \ref getShapes())
	inline std::vector<TriMesh *> &getMeshes() { return m_meshes; }

	/// Return the scene's triangular meshes (a subset of \ref getShapes())
	inline const std::vector<TriMesh *> &getMeshes() const { return m_meshes; }

	/// Return the scene's normal shapes (including triangular meshes)
	inline ref_vector<Shape> &getShapes() { return m_shapes; }

	/// Return the scene's normal shapes (including triangular meshes)
	inline const ref_vector<Shape> &getShapes() const { return m_shapes; }
	/**
	 * \brief Return the scene's "special" shapes, which are stored
	 * separately from the regular shape list (\ref getShapes()).
	 * NOTE(review): the exact criterion for a shape being "special"
	 * is not visible here -- verify against \c addShape().
	 */
	inline ref_vector<Shape> &getSpecialShapes() { return m_specialShapes; }

	/// Return the scene's "special" shapes (const version)
	inline const ref_vector<Shape> &getSpecialShapes() const { return m_specialShapes; }

	/// Return the scene's emitters
	inline ref_vector<Emitter> &getEmitters() { return m_emitters; }

	/// Return the scene's emitters (const version)
	inline const ref_vector<Emitter> &getEmitters() const { return m_emitters; }

	/// Return the scene's participating media
	inline ref_vector<Medium> &getMedia() { return m_media; }

	/// Return the scene's participating media (const version)
	inline const ref_vector<Medium> &getMedia() const { return m_media; }

	/// Return referenced objects (such as textures, BSDFs)
	inline ref_vector<ConfigurableObject> &getReferencedObjects() { return m_objects; }

	/// Return referenced objects (such as textures, BSDFs) -- const version
	inline const ref_vector<ConfigurableObject> &getReferencedObjects() const { return m_objects; }
	/// Return the name of the file containing the original description of this scene
	inline const fs::path &getSourceFile() const { return *m_sourceFile; }

	/// Set the name of the file containing the original description of this scene
	void setSourceFile(const fs::path &name);

	/// Return the render output filename
	inline const fs::path &getDestinationFile() const { return *m_destinationFile; }

	/// Set the render output filename
	void setDestinationFile(const fs::path &name);

	/// Does the destination file already exist? (queried via the sensor's film)
	inline bool destinationExists() const { return m_sensor->getFilm()->destinationExists(*m_destinationFile); }

	/// Set the block resolution used to split images into parallel workloads
	inline void setBlockSize(uint32_t size) { m_blockSize = size; }

	/// Return the block resolution used to split images into parallel workloads
	inline uint32_t getBlockSize() const { return m_blockSize; }
	/// Serialize the whole scene to a network/file stream
	void serialize(Stream *stream, InstanceManager *manager) const;

	/* NetworkedObject implementation */
	void bindUsedResources(ParallelProcess *proc) const;
	void wakeup(ConfigurableObject *parent,
		std::map<std::string, SerializableObject *> &params);

	/// Return a human-readable string representation of the scene
	std::string toString() const;
	//! @}
	// =============================================================

	MTS_DECLARE_CLASS()
protected:
	/// Virtual destructor
	virtual ~Scene();

	/// \cond
	/// Add a shape to the scene (used internally during scene construction)
	void addShape(Shape *shape);
	/// \endcond
private:
	ref<ShapeKDTree> m_kdtree;                ///< kd-tree ray intersection accelerator
	ref<Sensor> m_sensor;                     ///< Main sensor (\c NULL after unserialization)
	ref<Integrator> m_integrator;             ///< Rendering integrator (\c NULL after unserialization)
	ref<Sampler> m_sampler;                   ///< Sample generator (\c NULL after unserialization)
	ref<Emitter> m_environmentEmitter;        ///< Environment emitter, if any
	ref_vector<Shape> m_shapes;               ///< Normal shapes (includes triangular meshes)
	ref_vector<Shape> m_specialShapes;        ///< Shapes stored separately from \c m_shapes
	ref_vector<Sensor> m_sensors;             ///< All sensors specified by the scene
	ref_vector<Emitter> m_emitters;           ///< All emitters in the scene
	ref_vector<ConfigurableObject> m_objects; ///< Referenced objects (textures, BSDFs, ...)
	ref_vector<NetworkedObject> m_netObjects; ///< Referenced networked objects
	ref_vector<Subsurface> m_ssIntegrators;   ///< Subsurface scattering integrators
	ref_vector<Medium> m_media;               ///< Participating media
	std::vector<TriMesh *> m_meshes;          ///< Triangular meshes (subset of \c m_shapes)
	fs::path *m_sourceFile;                   ///< Original scene description filename
	fs::path *m_destinationFile;              ///< Render output filename
	DiscreteDistribution m_emitterPDF;        ///< Discrete distribution over the emitters
	AABB m_aabb;                              ///< Axis-aligned scene bounding box
	uint32_t m_blockSize;                     ///< Block resolution for parallel workloads
	bool m_degenerateSensor;                  ///< Spatially degenerate sensor? (see \ref hasDegenerateSensor())
	bool m_degenerateEmitters;                ///< All emitters spatially degenerate? (see \ref hasDegenerateEmitters())
  1088. };
  1089. MTS_NAMESPACE_END
  1090. #include <mitsuba/render/records.inl>
  1091. #endif /* __MITSUBA_RENDER_SCENE_H_ */