added field integrator; bugfixes

Wenzel Jakob 2014-02-17 15:05:17 +01:00
parent e1080ff7b8
commit 399d7b2173
7 changed files with 222 additions and 15 deletions

View File

@@ -513,7 +513,6 @@
author={Kelemen, C. and Szirmay-Kalos, L. and Antal, G. and Csonka, F.},
booktitle={Computer Graphics Forum},
volume={21},
number={3},
pages={531--540},
year={2002}
}

View File

@@ -206,7 +206,7 @@ public:
/// Create a clone of the entire image block
ref<ImageBlock> clone() const {
ref<ImageBlock> clone = new ImageBlock(m_bitmap->getPixelFormat(),
- m_bitmap->getSize() - Vector2i(2*m_borderSize, 2*m_borderSize), m_filter);
+ m_bitmap->getSize() - Vector2i(2*m_borderSize, 2*m_borderSize), m_filter, m_bitmap->getChannelCount());
copyTo(clone);
return clone;
}
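The hunk above fixes a cloning bug: the new ImageBlock was created without forwarding the channel count, so clones of multi-channel blocks (as produced by the multichannel integrator) silently fell back to the default layout. A minimal sanity check, assuming the constructor and accessors used in the hunk and a hypothetical box reconstruction filter obtained through the plugin manager:

#include <mitsuba/core/plugin.h>
#include <mitsuba/core/rfilter.h>
#include <mitsuba/render/imageblock.h>

using namespace mitsuba;

static void testClonePreservesChannelCount() {
    // Illustrative five-channel block, similar to what the multichannel integrator produces
    ref<ReconstructionFilter> filter = static_cast<ReconstructionFilter *> (
        PluginManager::getInstance()->createObject(
            MTS_CLASS(ReconstructionFilter), Properties("box")));
    ref<ImageBlock> block = new ImageBlock(Bitmap::EMultiSpectrumAlphaWeight,
        Vector2i(32, 32), filter, 5);
    ref<ImageBlock> copy = block->clone();
    // Before the fix, the clone was constructed without the channel count argument
    SAssert(copy->getBitmap()->getChannelCount() ==
            block->getBitmap()->getChannelCount());
}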

View File

@@ -264,7 +264,7 @@ public:
Imf::ChannelList &channels = header.channels();
for (size_t i=0; i<m_channelNames.size(); ++i)
- channels.insert(m_channelNames[i], Imf::Channel(compType));
+ channels.insert(m_channelNames[i].c_str(), Imf::Channel(compType));
m_output = new Imf::TiledOutputFile(filename.string().c_str(), header);
m_frameBuffer = new Imf::FrameBuffer();
@@ -287,7 +287,7 @@ public:
char *ptr = (char *) m_tile->getUInt8Data();
for (size_t i=0; i<m_channelNames.size(); ++i) {
- m_frameBuffer->insert(m_channelNames[i],
+ m_frameBuffer->insert(m_channelNames[i].c_str(),
Imf::Slice(compType, ptr, m_pixelStride, m_rowStride));
ptr += compStride;
}
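The two insert() hunks above switch the channel name passed to OpenEXR from std::string to const char * via c_str(), since some OpenEXR releases only provide the const char * overloads of insert(). A hedged sketch of the surrounding pattern (channel declaration plus interleaved frame-buffer slices); the function and variable names are illustrative rather than Mitsuba's:

#include <ImfChannelList.h>
#include <ImfFrameBuffer.h>
#include <ImfHeader.h>
#include <string>
#include <vector>

// Declare one EXR channel per name in the file header.
static void declareChannels(Imf::Header &header,
        const std::vector<std::string> &names, Imf::PixelType compType) {
    Imf::ChannelList &channels = header.channels();
    for (size_t i = 0; i < names.size(); ++i)
        channels.insert(names[i].c_str(), Imf::Channel(compType));
}

// Bind one slice per channel, advancing the base pointer by one component
// between channels (mirrors the loop in the second hunk).
static void bindSlices(Imf::FrameBuffer &frameBuffer,
        const std::vector<std::string> &names, Imf::PixelType compType,
        char *base, size_t compStride, size_t pixelStride, size_t rowStride) {
    for (size_t i = 0; i < names.size(); ++i) {
        frameBuffer.insert(names[i].c_str(),
            Imf::Slice(compType, base, pixelStride, rowStride));
        base += compStride;
    }
}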
@@ -332,7 +332,6 @@ public:
++m_peakUsage;
}
uint32_t idx = (uint32_t) x + (uint32_t) y * m_blocksH;
m_origBlocks[idx] = copy1;
m_mergedBlocks[idx] = copy2;

View File

@@ -30,6 +30,7 @@ add_integrator(adaptive misc/adaptive.cpp)
add_integrator(irrcache misc/irrcache.cpp
misc/irrcache_proc.h misc/irrcache_proc.cpp)
add_integrator(multichannel misc/multichannel.cpp)
add_integrator(field misc/field.cpp)
# Bidirectional techniques
add_bidir(bdpt bdpt/bdpt.h bdpt/bdpt.cpp

View File

@@ -17,6 +17,7 @@ plugins += env.SharedLibrary('vpl', ['vpl/vpl.cpp'])
plugins += env.SharedLibrary('adaptive', ['misc/adaptive.cpp'])
plugins += env.SharedLibrary('irrcache', ['misc/irrcache.cpp', 'misc/irrcache_proc.cpp'])
plugins += env.SharedLibrary('multichannel', ['misc/multichannel.cpp'])
plugins += env.SharedLibrary('field', ['misc/field.cpp'])
# Bidirectional techniques
bidirEnv = env.Clone()

View File

@@ -0,0 +1,170 @@
/*
This file is part of Mitsuba, a physically based rendering system.
Copyright (c) 2007-2012 by Wenzel Jakob and others.
Mitsuba is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License Version 3
as published by the Free Software Foundation.
Mitsuba is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <mitsuba/render/scene.h>
#include <mitsuba/render/renderproc.h>
MTS_NAMESPACE_BEGIN
/*!\plugin{field}{Field extraction integrator}
* \order{17}
* \parameters{
* \parameter{field}{\String}{Denotes the name of the field that should be extracted.
* The following choices are possible:
* \begin{itemize}
* \setlength{\itemsep}{1pt}
* \setlength{\parskip}{1pt}
* \item \code{position}: 3D position in world space
* \item \code{relPosition}: 3D position in camera space
* \item \code{distance}: Ray distance to the shading point
* \item \code{geoNormal}: Geometric surface normal
* \item \code{shNormal}: Shading surface normal
* \item \code{uv}: UV coordinate value
* \item \code{shapeIndex}: Integer index of the high-level shape
* \item \code{primIndex}: Integer shape primitive index
* \end{itemize}
* }
* }
*
* This integrator extracts a requested field from the intersection records of shading
* points and converts the resulting data into color values. It is meant to be used in conjunction with
* \pluginref{multichannel} to dump auxiliary information (such as depth or surface normals
* of surfaces seen by the camera) into extra channels of a rendered image, for instance to
* create benchmark data for computer vision applications.
* Please refer to the documentation of \pluginref{multichannel} for an example.
*/
class FieldIntegrator : public SamplingIntegrator {
public:
enum EField {
EPosition,
ERelativePosition,
EDistance,
EGeometricNormal,
EShadingNormal,
EUV,
EShapeIndex,
EPrimIndex
};
FieldIntegrator(const Properties &props) : SamplingIntegrator(props) {
std::string field = props.getString("field");
if (field == "position") {
m_field = EPosition;
} else if (field == "relPosition") {
m_field = ERelativePosition;
} else if (field == "distance") {
m_field = EDistance;
} else if (field == "geoNormal") {
m_field = EGeometricNormal;
} else if (field == "shNormal") {
m_field = EShadingNormal;
} else if (field == "uv") {
m_field = EUV;
} else if (field == "shapeIndex") {
m_field = EShapeIndex;
} else if (field == "primIndex") {
m_field = EPrimIndex;
} else {
Log(EError, "Invalid 'field' parameter. Must be one of 'position', "
"'relPosition', 'distance', 'geoNormal', 'shNormal', "
"'primIndex', 'shapeIndex', or 'uv'!");
}
if (SPECTRUM_SAMPLES != 3 && (m_field == EUV || m_field == EShadingNormal || m_field == EGeometricNormal
|| m_field == ERelativePosition || m_field == EPosition)) {
Log(EError, "The field integrator implementation requires renderings to be done in RGB when "
"extracting positional data or surface normals / UV coordinates.");
}
}
FieldIntegrator(Stream *stream, InstanceManager *manager)
: SamplingIntegrator(stream, manager) {
m_field = (EField) stream->readInt();
}
void serialize(Stream *stream, InstanceManager *manager) const {
SamplingIntegrator::serialize(stream, manager);
stream->writeInt((int) m_field);
}
Spectrum Li(const RayDifferential &ray, RadianceQueryRecord &rRec) const {
Spectrum result(0.0f);
if (!rRec.rayIntersect(ray))
return Spectrum(0.0f);
Intersection &its = rRec.its;
switch (m_field) {
case EPosition:
result.fromLinearRGB(its.p.x, its.p.y, its.p.z);
break;
case ERelativePosition: {
const Sensor *sensor = rRec.scene->getSensor();
const Transform &t = sensor->getWorldTransform()->eval(its.time).inverse();
Point p = t(its.p);
result.fromLinearRGB(p.x, p.y, p.z);
}
break;
case EDistance:
result = Spectrum(its.t);
break;
case EGeometricNormal:
result.fromLinearRGB(its.geoFrame.n.x, its.geoFrame.n.y, its.geoFrame.n.z);
break;
case EShadingNormal:
result.fromLinearRGB(its.shFrame.n.x, its.shFrame.n.y, its.shFrame.n.z);
break;
case EUV:
result.fromLinearRGB(its.uv.x, its.uv.y, 0);
break;
case EShapeIndex: {
const ref_vector<Shape> &shapes = rRec.scene->getShapes();
result = Spectrum((Float) -1);
for (size_t i=0; i<shapes.size(); ++i) {
if (shapes[i] == its.shape) {
result = Spectrum((Float) i);
break;
}
}
}
break;
case EPrimIndex:
result = Spectrum((int) its.primIndex);
break;
default:
Log(EError, "Internal error!");
}
return result;
}
std::string toString() const {
return "FieldIntegrator[]";
}
MTS_DECLARE_CLASS()
private:
EField m_field;
};
MTS_IMPLEMENT_CLASS_S(FieldIntegrator, false, SamplingIntegrator)
MTS_EXPORT_PLUGIN(FieldIntegrator, "Field extraction integrator");
MTS_NAMESPACE_END
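For reference, the new plugin can also be instantiated programmatically, the same way the XML loader resolves an integrator declaration. A minimal sketch assuming Mitsuba's plugin manager API; the field name "distance" comes from the parameter table in the plugin documentation above, while the helper itself is hypothetical:

#include <mitsuba/core/plugin.h>
#include <mitsuba/render/integrator.h>

using namespace mitsuba;

// Equivalent to <integrator type="field"><string name="field" value="distance"/></integrator>
static ref<SamplingIntegrator> makeDistanceIntegrator() {
    Properties props("field");
    props.setString("field", "distance");
    return static_cast<SamplingIntegrator *> (
        PluginManager::getInstance()->createObject(
            MTS_CLASS(SamplingIntegrator), props));
}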

View File

@@ -21,28 +21,63 @@
MTS_NAMESPACE_BEGIN
- /*! \plugin{direct}{Multi-channel integrator}
+ /*!\plugin{multichannel}{Multi-channel integrator}
* \order{16}
- * \parameters{
- * \parameter{\Unnamed}{\BSDF}{Multiple sub-integrators whose output
+ * \parameters{
+ * \parameter{\Unnamed}{\Integrator}{One or more sub-integrators whose output
* should be rendered into a combined multi-channel image}
* }
*
* The multi-channel integrator groups several sub-integrators together
- * and invokes them at the same time for each pixel; the result is written
- * to a general multi-channel image. The most common application of this plugin
- * is to create additional image channels storing surface normals or the distance
+ * and invokes them at the same time for each pixel; the result from each
+ * integrator is written into a separate channel of the output image.
+ * This could include things like surface normals or the distance
* from the camera (via the \pluginref{field} plugin) or ambient occlusion
* (via the \pluginref{ao} plugin).
+ * In this way, this integrator can be a powerful tool for unusual applications
+ * of Mitsuba, e.g. to create reference data for computer vision algorithms. Currently, it only
+ * works with a subset of the other plugins---see the red box for details.
*
- * This is a fairly advanced plugin that only plays well with a small
- * part of Mitsuba---see the remarks in the red box for details.
* The \code{multichannel} plugin also disables certain checks for negative or infinite
* radiance values during rendering that normally cause warnings to be emitted.
* This is intentional, since extracted fields may legitimately take on
* such values.
*
* The following example contains a typical setup for rendering a 7-channel EXR image:
* 3 for a path traced image (RGB), 3 for surface normals
* (encoded as RGB), and 1 channel for the ray distance measured from the camera.
*
* \vspace{2mm}
* \begin{xml}
* <scene>
* <integrator type="multichannel">
* <integrator type="path"/>
* <integrator type="field">
* <string name="field" value="shNormal"/>
* </integrator>
* <integrator type="field">
* <string name="field" value="distance"/>
* </integrator>
* </integrator>
*
* <sensor type="perspective">
* <sampler type="halton">
* <integer name="sampleCount" value="32"/>
* </sampler>
* <film type="hdrfilm">
* <string name="pixelFormat" value="rgb, rgb, luminance"/>
* <string name="channelNames" value="color, normal, distance"/>
* </film>
* </sensor>
* <!-- **** scene contents **** -->
* </scene>
* \end{xml}
*
* \remarks{
* \item Requires the \pluginref{hdrfilm} or \pluginref{tiledhdrfilm}.
* \item All nested integrators must
* conform to Mitsuba's basic \emph{SamplingIntegrator} interface.
- * Currently, only a few of them satisfy this, including:
+ * Currently, only a few of them do this, including:
* \pluginref{field}, \pluginref{ao}, \pluginref{direct}, \pluginref{path},
* \pluginref{volpath}, \pluginref[volpathsimple]{volpath\_simple},
* and \pluginref{irrcache}.
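A hedged sketch of what "invokes them at the same time for each pixel" means in practice: one camera ray is traced, every nested SamplingIntegrator produces its own Spectrum for it, and the results are packed back-to-back into the per-pixel channel array. The function and buffer layout below are illustrative, not the plugin's actual members:

#include <mitsuba/render/integrator.h>
#include <vector>

using namespace mitsuba;

static void evaluatePixel(const RayDifferential &ray, const RadianceQueryRecord &rRec,
        const std::vector<SamplingIntegrator *> &children, Float *channels) {
    for (size_t i = 0; i < children.size(); ++i) {
        RadianceQueryRecord query(rRec);   // fresh per-child query for the same ray
        Spectrum value = children[i]->Li(ray, query);
        for (int j = 0; j < SPECTRUM_SAMPLES; ++j)
            channels[i * SPECTRUM_SAMPLES + j] = value[j];
    }
}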
@@ -105,7 +140,9 @@ public:
ref<BlockedRenderProcess> proc = new BlockedRenderProcess(job,
queue, scene->getBlockSize());
- proc->setPixelFormat(Bitmap::EMultiSpectrumAlphaWeight, m_integrators.size() * SPECTRUM_SAMPLES + 2, false);
+ proc->setPixelFormat(
+     m_integrators.size() > 1 ? Bitmap::EMultiSpectrumAlphaWeight : Bitmap::ESpectrumAlphaWeight,
+     m_integrators.size() * SPECTRUM_SAMPLES + 2, false);
int integratorResID = sched->registerResource(this);
proc->bindResource("integrator", integratorResID);
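The pixel-format hunk above can be read as the following bookkeeping: each sub-integrator contributes SPECTRUM_SAMPLES spectral channels, the whole block shares one alpha and one weight channel (hence the +2), and a single child degenerates to an ordinary spectrum/alpha/weight image. A small illustrative sketch with hypothetical helper names:

#include <mitsuba/core/bitmap.h>
#include <mitsuba/core/spectrum.h>

using namespace mitsuba;

static Bitmap::EPixelFormat multichannelPixelFormat(size_t nIntegrators) {
    return nIntegrators > 1
        ? Bitmap::EMultiSpectrumAlphaWeight   // several results per pixel
        : Bitmap::ESpectrumAlphaWeight;       // standard single-result block
}

static int multichannelChannelCount(size_t nIntegrators) {
    return (int) (nIntegrators * SPECTRUM_SAMPLES) + 2;   // spectra + alpha + weight
}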