added a few more camera bindings and support for motion blur manipulation from Python

metadata
Wenzel Jakob 2013-02-17 12:46:33 -05:00
parent 3ed9b6f6f1
commit 2d44b16442
5 changed files with 96 additions and 5 deletions
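As a rough illustration of what the new bindings allow (a sketch only: the mitsuba.core import path and the way the sensor object is obtained, e.g. from a loaded scene, are assumptions; the snippets after the individual diffs below go into more detail):

from mitsuba.core import Transform, AnimatedTransform

# 'sensor' is assumed to be a ProjectiveCamera fetched elsewhere, e.g. scene.getSensor()
atrafo = AnimatedTransform(Transform())  # animated camera-to-world transform, identity base
# ... populate 'atrafo' with animation tracks (see the AnimatedTransform sketch further below)
sensor.setInverseViewTransform(atrafo)   # new overload taking an AnimatedTransform
sensor.setShutterOpen(0.0)               # shutter opens at t = 0
sensor.setShutterOpenTime(1.0)           # and stays open for one time unit, enabling motion blur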

View File

@@ -132,6 +132,12 @@ public:
m_values[i] = concatenateTransformations(m_values[i], value);
}
/// Append a transformation to every entry of this track
void appendTransformation(const ValueType &value) {
for (size_t i=0; i<m_values.size(); ++i)
m_values[i] = concatenateTransformations(value, m_values[i]);
}
/// Serialize to a binary data stream
inline void serialize(Stream *stream) const {
stream->writeUInt(m_type);
@@ -259,6 +265,11 @@ template<> inline Vector AnimationTrack<Vector>::concatenateTransformations(
return Vector(value1.x * value2.x, value1.y * value2.y, value1.z * value2.z);
}
template<> inline Point AnimationTrack<Point>::concatenateTransformations(
const Point &value1, const Point &value2) const {
return value1 + value2;
}
template<> inline Float AnimationTrack<Float>::concatenateTransformations(
const Float &value1, const Float &value2) const {
if (m_type == ETranslationX || m_type == ETranslationY || m_type == ETranslationZ)
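A hedged Python sketch of how the track operations above surface through the bindings added further below (the mitsuba.core import path and the append(time, value) signature are assumptions; for translation tracks, concatenation is presumably addition, in line with the Point specialization above):

from mitsuba.core import FloatTrack, AbstractAnimationTrack

track = FloatTrack(AbstractAnimationTrack.ETranslationX, 0)  # empty X-translation track
track.append(0.0, 0.0)           # keyframe at t = 0.0 with offset 0.0 (assumed append(time, value))
track.append(1.0, 2.5)           # keyframe at t = 1.0 with offset 2.5
track.appendTransformation(1.0)  # translation concatenates additively, so every key shifts by 1.0
print(track.eval(0.5))           # interpolated value between the two keyframes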

View File

@@ -258,9 +258,15 @@ public:
/// Return the time value of the shutter opening event
inline Float getShutterOpen() const { return m_shutterOpen; }
/// Set the time value of the shutter opening event
void setShutterOpen(Float time) { m_shutterOpen = time; }
/// Return the length of time for which the shutter remains open
inline Float getShutterOpenTime() const { return m_shutterOpenTime; }
/// Set the length of time for which the shutter remains open
inline void setShutterOpenTime(Float time) { m_shutterOpenTime = time; }
/**
* \brief Does the method \ref sampleRay() require a uniformly distributed
* sample for the time-dependent component?
@@ -381,10 +387,16 @@ public:
}
/**
* \brief Overwrite the inverse world-to-view transformation
* \brief Overwrite the view-to-world transformation
* with a static (i.e. non-animated) transformation.
*/
virtual void setInverseViewTransform(const Transform &trafo);
void setInverseViewTransform(const Transform &trafo);
/**
* \brief Overwrite the view-to-world transformation
* with an animated transformation
*/
void setInverseViewTransform(AnimatedTransform *trafo);
/**
* \brief Return a projection matrix suitable for rendering the

View File

@@ -327,7 +327,6 @@ Point transform_mul_point(Transform *transform, const Point &point) { return tra
Ray transform_mul_ray(Transform *transform, const Ray &ray) { return transform->operator()(ray); }
Transform transform_mul_transform(Transform *transform, const Transform &other) { return *transform * other; }
bp::object cast(ConfigurableObject *obj) {
const Class *cls = obj->getClass();
#define TryCast(ClassName) if (cls->derivesFrom(MTS_CLASS(ClassName))) \
@@ -478,6 +477,16 @@ BOOST_PYTHON_MEMBER_FUNCTION_OVERLOADS(fromLinearRGB_overloads, fromLinearRGB, 3
BOOST_PYTHON_MEMBER_FUNCTION_OVERLOADS(fromXYZ_overloads, fromXYZ, 3, 4)
BOOST_PYTHON_MEMBER_FUNCTION_OVERLOADS(fromIPT_overloads, fromIPT, 3, 4)
#define IMPLEMENT_ANIMATION_TRACK(Name) \
BP_CLASS(Name, AbstractAnimationTrack, (bp::init<AbstractAnimationTrack::EType, size_t>())) \
.def(bp::init<Name *>()) \
.def("reserve", &Name::reserve) \
.def("prependTransformation", &Name::prependTransformation) \
.def("appendTransformation", &Name::appendTransformation) \
.def("eval", &Name::eval, BP_RETURN_VALUE) \
.def("setValue", &Name::setValue) \
.def("getValue", &Name::getValue, BP_RETURN_VALUE) \
.def("append", &Name::append)
void export_core() {
bp::to_python_converter<fs::path, path_to_python_str>();
@@ -897,6 +906,54 @@ void export_core() {
.def("isBusy", &Scheduler::isBusy)
.staticmethod("getInstance");
BP_CLASS(AbstractAnimationTrack, Object, bp::no_init)
.def("getType", &AbstractAnimationTrack::getType)
.def("setTime", &AbstractAnimationTrack::setTime)
.def("getTime", &AbstractAnimationTrack::getTime)
.def("getSize", &AbstractAnimationTrack::getSize)
.def("clone", &AbstractAnimationTrack::clone, BP_RETURN_VALUE);
IMPLEMENT_ANIMATION_TRACK(FloatTrack);
IMPLEMENT_ANIMATION_TRACK(VectorTrack);
IMPLEMENT_ANIMATION_TRACK(PointTrack);
IMPLEMENT_ANIMATION_TRACK(QuatTrack);
BP_SETSCOPE(AbstractAnimationTrack_class);
bp::enum_<AbstractAnimationTrack::EType>("EType")
.value("EInvalid", AbstractAnimationTrack::EInvalid)
.value("ETranslationX", AbstractAnimationTrack::ETranslationX)
.value("ETranslationY", AbstractAnimationTrack::ETranslationY)
.value("ETranslationZ", AbstractAnimationTrack::ETranslationZ)
.value("ETranslationXYZ", AbstractAnimationTrack::ETranslationXYZ)
.value("EScaleX", AbstractAnimationTrack::EScaleX)
.value("EScaleY", AbstractAnimationTrack::EScaleY)
.value("EScaleZ", AbstractAnimationTrack::EScaleZ)
.value("EScaleXYZ", AbstractAnimationTrack::EScaleXYZ)
.value("ERotationX", AbstractAnimationTrack::ERotationX)
.value("ERotationY", AbstractAnimationTrack::ERotationY)
.value("ERotationZ", AbstractAnimationTrack::ERotationZ)
.value("ERotationQuat", AbstractAnimationTrack::ERotationQuat)
.export_values();
BP_SETSCOPE(coreModule);
AbstractAnimationTrack *(AnimatedTransform::*animatedTransform_getTrack)(size_t) = &AnimatedTransform::getTrack;
AbstractAnimationTrack *(AnimatedTransform::*animatedTransform_findTrack)(AbstractAnimationTrack::EType) = &AnimatedTransform::findTrack;
BP_CLASS(AnimatedTransform, Object, (bp::init<Transform>()))
.def(bp::init<Stream *>())
.def(bp::init<AnimatedTransform *>())
.def("getTrackCount", &AnimatedTransform::getTrackCount)
.def("findTrack", animatedTransform_findTrack, BP_RETURN_VALUE)
.def("getTrack", animatedTransform_getTrack, BP_RETURN_VALUE)
.def("addTrack", &AnimatedTransform::addTrack)
.def("isStatic", &AnimatedTransform::isStatic)
.def("sortAndSimplify", &AnimatedTransform::sortAndSimplify)
.def("serialize", &AnimatedTransform::serialize)
.def("getTranslationBounds", &AnimatedTransform::getTranslationBounds, BP_RETURN_VALUE)
.def("getSpatialBounds", &AnimatedTransform::getSpatialBounds, BP_RETURN_VALUE)
.def("eval", &AnimatedTransform::eval, BP_RETURN_VALUE);
BP_STRUCT(Spectrum, bp::init<>())
.def("__init__", bp::make_constructor(spectrum_array_constructor))
.def(bp::init<Float>())

View File

@@ -353,12 +353,18 @@ void export_render() {
BP_CLASS(Sensor, ConfigurableObject, bp::no_init) // incomplete
.def("getShutterOpen", &Sensor::getShutterOpen)
.def("getShutterOpenTime", &Sensor::getShutterOpenTime);
.def("setShutterOpen", &Sensor::setShutterOpen)
.def("getShutterOpenTime", &Sensor::getShutterOpenTime)
.def("setShutterOpenTime", &Sensor::setShutterOpenTime);
void (ProjectiveCamera::*projectiveCamera_setInverseViewTransform1)(const Transform &) = &ProjectiveCamera::setInverseViewTransform;
void (ProjectiveCamera::*projectiveCamera_setInverseViewTransform2)(AnimatedTransform *) = &ProjectiveCamera::setInverseViewTransform;
BP_CLASS(ProjectiveCamera, Sensor, bp::no_init)
.def("getViewTransform", &ProjectiveCamera::getViewTransform, BP_RETURN_VALUE)
.def("getInverseViewTransform", &ProjectiveCamera::getInverseViewTransform, BP_RETURN_VALUE)
.def("setInverseViewTransform", &ProjectiveCamera::setInverseViewTransform)
.def("setInverseViewTransform", projectiveCamera_setInverseViewTransform1)
.def("setInverseViewTransform", projectiveCamera_setInverseViewTransform2)
.def("getProjectionTransform", &ProjectiveCamera::getProjectionTransform, BP_RETURN_VALUE)
.def("getNearClip", &ProjectiveCamera::getNearClip)
.def("getFarClip", &ProjectiveCamera::getFarClip)

View File

@@ -205,6 +205,11 @@ void ProjectiveCamera::setInverseViewTransform(const Transform &trafo) {
m_properties.setTransform("toWorld", trafo, false);
}
void ProjectiveCamera::setInverseViewTransform(AnimatedTransform *trafo) {
m_worldTransform = trafo;
m_properties.setAnimatedTransform("toWorld", trafo, false);
}
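Both overloads are registered under the same Python name (the two setInverseViewTransform .def lines in the camera bindings above), so the argument type selects the implementation. A minimal sketch, assuming camera refers to a ProjectiveCamera instance and that Transform.translate and Vector are exposed as usual:

from mitsuba.core import Transform, AnimatedTransform, Vector

camera.setInverseViewTransform(Transform.translate(Vector(0, 0, -5)))  # static pose
camera.setInverseViewTransform(AnimatedTransform(Transform()))         # animated pose, e.g. for motion blur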
PerspectiveCamera::PerspectiveCamera(const Properties &props)
: ProjectiveCamera(props), m_xfov(0.0f) {
props.markQueried("fov");