Commit ed74f20d authored by Csaba Pinter's avatar Csaba Pinter

ENH: Add functions to set VR physical geometry with matrix

- Get/SetPhysicalToWorldMatrix functions added to vtkOpenVRRenderWindow to handle the PhysicalTranslation etc. camera related geometry changes as a matrix
- Refactored UpdateHMDMatrixPose code so that it is more understandable what happens under the hood when the camera is set. It works the very same way, but it is now better formalized and readable.
Here's how setting the camera works:
Scaling is applied through moving the camera closer to the focal point, because scaling of all actors is not feasible, and vtkCamera::ModelTransformMatrix is not supported throughout VTK (clipping issues etc.). To achieve this, a new coordinate system called NonScaledWorld is introduced. The relationship between Physical (in which the HMD pose is given by OpenVR) and NonScaledWorld is described by the PhysicalViewUp etc. member variables. After getting the HMD pose in Physical, those coordinates and axes are converted to the NonScaledWorld coordinate system, on which the PhysicalScaling trick of modifying the camera position is applied, resulting in the World coordinate system.
- Renamed incorrect distance variable to physical scale
parent b2368075
......@@ -246,7 +246,7 @@ void vtkInteractorStyle3D::Dolly3D(vtkEventData *ed)
double *trans = rwi->GetPhysicalTranslation(
this->CurrentRenderer->GetActiveCamera());
double distance = rwi->GetPhysicalScale();
double physicalScale = rwi->GetPhysicalScale();
// The world coordinate speed of
// movement can be determined from the camera scale.
......@@ -262,9 +262,9 @@ void vtkInteractorStyle3D::Dolly3D(vtkEventData *ed)
double factor = tpos[1]*2.0*this->DollyMotionFactor/90.0;
rwi->SetPhysicalTranslation(
this->CurrentRenderer->GetActiveCamera(),
trans[0]-vdir[0]*factor*distance,
trans[1]-vdir[1]*factor*distance,
trans[2]-vdir[2]*factor*distance);
trans[0]-vdir[0]*factor*physicalScale,
trans[1]-vdir[1]*factor*physicalScale,
trans[2]-vdir[2]*factor*physicalScale);
if (this->AutoAdjustCameraClippingRange)
{
......@@ -272,40 +272,37 @@ void vtkInteractorStyle3D::Dolly3D(vtkEventData *ed)
}
}
void vtkInteractorStyle3D::SetScale(vtkCamera *camera, double newDistance)
void vtkInteractorStyle3D::SetScale(vtkCamera *camera, double newScale)
{
vtkRenderWindowInteractor3D *rwi =
static_cast<vtkRenderWindowInteractor3D *>(this->Interactor);
double *trans = rwi->GetPhysicalTranslation(camera);
double distance = rwi->GetPhysicalScale();
double physicalScale = rwi->GetPhysicalScale();
double *dop = camera->GetDirectionOfProjection();
double *pos = camera->GetPosition();
double hmd[3];
hmd[0] = (pos[0] + trans[0])/distance;
hmd[1] = (pos[1] + trans[1])/distance;
hmd[2] = (pos[2] + trans[2])/distance;
// cerr << "dyf " << dyf << "\n";
// rwi->SetPhysicalTranslation(camera,
// trans[0], trans[1] - distance + newDistance, trans[2]);
// trans = rwi->GetPhysicalTranslation(camera);
hmd[0] = (pos[0] + trans[0])/physicalScale;
hmd[1] = (pos[1] + trans[1])/physicalScale;
hmd[2] = (pos[2] + trans[2])/physicalScale;
double newPos[3];
newPos[0] = hmd[0]*newDistance - trans[0];
newPos[1] = hmd[1]*newDistance - trans[1];
newPos[2] = hmd[2]*newDistance - trans[2];
newPos[0] = hmd[0]*newScale - trans[0];
newPos[1] = hmd[1]*newScale - trans[1];
newPos[2] = hmd[2]*newScale - trans[2];
// Note: New camera properties are overridden by virtual reality render
// window if head-mounted display is tracked
camera->SetFocalPoint(
newPos[0] + dop[0]*newDistance,
newPos[1] + dop[1]*newDistance,
newPos[2] + dop[2]*newDistance);
newPos[0] + dop[0]*newScale,
newPos[1] + dop[1]*newScale,
newPos[2] + dop[2]*newScale);
camera->SetPosition(
newPos[0],
newPos[1],
newPos[2]);
rwi->SetPhysicalScale(newDistance);
rwi->SetPhysicalScale(newScale);
if (this->AutoAdjustCameraClippingRange && this->CurrentRenderer)
{
......
......@@ -89,13 +89,11 @@ public:
//@}
/**
* Set the distance for the camera. The distance
* in VR represents the scaling from world
* to physical space. So when we set it to a new
* value we also adjust the HMD position to maintain
* the same relative position.
* Set the scaling factor from world to physical space.
* In VR when we set it to a new value we also adjust the
* HMD position to maintain the same relative position.
*/
void SetScale(vtkCamera *cam, double distance);
virtual void SetScale(vtkCamera *cam, double newScale);
/**
* Get the interaction picker
......
......@@ -122,7 +122,7 @@ void vtkOpenVRCamera::GetHMDEyeProjections(vtkRenderer *ren)
void vtkOpenVRCamera::ApplyEyePose(vtkOpenVRRenderWindow *win, bool left, double factor)
{
double distance = win->GetPhysicalScale();
double physicalScale = win->GetPhysicalScale();
double *dop = this->GetDirectionOfProjection();
double *vup = this->GetViewUp();
......@@ -131,9 +131,9 @@ void vtkOpenVRCamera::ApplyEyePose(vtkOpenVRRenderWindow *win, bool left, double
double *offset = (left ? this->LeftEyePose : this->RightEyePose);
double newOffset[3];
newOffset[0] = factor*(offset[0]*vright[0] + offset[1]*vup[0] - offset[2]*dop[0])*distance;
newOffset[1] = factor*(offset[0]*vright[1] + offset[1]*vup[1] - offset[2]*dop[1])*distance;
newOffset[2] = factor*(offset[0]*vright[2] + offset[1]*vup[2] - offset[2]*dop[2])*distance;
newOffset[0] = factor*(offset[0]*vright[0] + offset[1]*vup[0] - offset[2]*dop[0])*physicalScale;
newOffset[1] = factor*(offset[0]*vright[1] + offset[1]*vup[1] - offset[2]*dop[1])*physicalScale;
newOffset[2] = factor*(offset[0]*vright[2] + offset[1]*vup[2] - offset[2]*dop[2])*physicalScale;
double *pos = this->GetPosition();
this->SetPosition(pos[0]+newOffset[0], pos[1] + newOffset[1], pos[2] + newOffset[2]);
double *fp = this->GetFocalPoint();
......
......@@ -655,12 +655,12 @@ void vtkOpenVRInteractorStyle::OnPan()
rwi->GetTranslation3D()[2] - rwi->GetLastTranslation3D()[2]};
double *ptrans = rwi->GetPhysicalTranslation(camera);
double distance = rwi->GetPhysicalScale();
double physicalScale = rwi->GetPhysicalScale();
rwi->SetPhysicalTranslation(camera,
ptrans[0] + t[0] * distance,
ptrans[1] + t[1] * distance,
ptrans[2] + t[2] * distance);
ptrans[0] + t[0] * physicalScale,
ptrans[1] + t[1] * physicalScale,
ptrans[2] + t[2] * physicalScale);
// clean up
if (this->Interactor->GetLightFollowCamera())
......@@ -694,9 +694,9 @@ void vtkOpenVRInteractorStyle::OnPinch()
vtkCamera *camera = this->CurrentRenderer->GetActiveCamera();
vtkRenderWindowInteractor3D *rwi =
static_cast<vtkRenderWindowInteractor3D *>(this->Interactor);
double distance = rwi->GetPhysicalScale();
double physicalScale = rwi->GetPhysicalScale();
this->SetScale(camera, distance / dyf);
this->SetScale(camera, physicalScale / dyf);
}
}
......
......@@ -21,6 +21,8 @@ https://github.com/ValveSoftware/openvr/blob/master/LICENSE
#include "vtkCommand.h"
#include "vtkFloatArray.h"
#include "vtkIdList.h"
#include "vtkMath.h"
#include "vtkMatrix4x4.h"
#include "vtkNew.h"
#include "vtkObjectFactory.h"
#include "vtkOpenGLError.h"
......@@ -397,62 +399,83 @@ void vtkOpenVRRenderWindow::UpdateHMDMatrixPose()
this->HMDTransform->Identity();
// get the position and orientation of the HMD
vr::TrackedDevicePose_t &tdPose =
this->TrackedDevicePose[vr::k_unTrackedDeviceIndex_Hmd];
double pos[3];
// Vive to world axes
double *vup = this->PhysicalViewUp;
double *dop = this->PhysicalViewDirection;
double vright[3];
vtkMath::Cross(dop, vup, vright);
// extract HMD axes
double hvright[3];
hvright[0] = tdPose.mDeviceToAbsoluteTracking.m[0][0];
hvright[1] = tdPose.mDeviceToAbsoluteTracking.m[1][0];
hvright[2] = tdPose.mDeviceToAbsoluteTracking.m[2][0];
double hvup[3];
hvup[0] = tdPose.mDeviceToAbsoluteTracking.m[0][1];
hvup[1] = tdPose.mDeviceToAbsoluteTracking.m[1][1];
hvup[2] = tdPose.mDeviceToAbsoluteTracking.m[2][1];
pos[0] = tdPose.mDeviceToAbsoluteTracking.m[0][3];
pos[1] = tdPose.mDeviceToAbsoluteTracking.m[1][3];
pos[2] = tdPose.mDeviceToAbsoluteTracking.m[2][3];
double distance = this->PhysicalScale;
double *trans = this->PhysicalTranslation;
// convert position to world coordinates
double npos[3];
npos[0] = pos[0]*vright[0] + pos[1]*vup[0] - pos[2]*dop[0];
npos[1] = pos[0]*vright[1] + pos[1]*vup[1] - pos[2]*dop[1];
npos[2] = pos[0]*vright[2] + pos[1]*vup[2] - pos[2]*dop[2];
vr::TrackedDevicePose_t &tdPose = this->TrackedDevicePose[vr::k_unTrackedDeviceIndex_Hmd];
// Note: Scaling is applied through moving the camera closer to the focal point, because
// scaling of all actors is not feasible, and vtkCamera::ModelTransformMatrix is not supported
// throughout VTK (clipping issues etc.). To achieve this, a new coordinate system called
// NonScaledWorld is introduced. The relationship between Physical (in which the HMD pose
// is given by OpenVR) and NonScaledWorld is described by the PhysicalViewUp etc. member variables.
// After getting the HMD pose in Physical, those coordinates and axes are converted to the
// NonScaledWorld coordinate system, on which the PhysicalScaling trick of modifying the
// camera position is applied, resulting in the World coordinate system.
// construct physical to non-scaled world axes (scaling is used later to move camera closer)
double physicalZ_NonscaledWorld[3] = { -this->PhysicalViewDirection[0],
-this->PhysicalViewDirection[1],
-this->PhysicalViewDirection[2] };
double* physicalY_NonscaledWorld = this->PhysicalViewUp;
double physicalX_NonscaledWorld[3] = {0.0};
vtkMath::Cross(physicalY_NonscaledWorld, physicalZ_NonscaledWorld, physicalX_NonscaledWorld);
// extract HMD axes and position
double hmdX_Physical[3] = { tdPose.mDeviceToAbsoluteTracking.m[0][0],
tdPose.mDeviceToAbsoluteTracking.m[1][0],
tdPose.mDeviceToAbsoluteTracking.m[2][0] };
double hmdY_Physical[3] = { tdPose.mDeviceToAbsoluteTracking.m[0][1],
tdPose.mDeviceToAbsoluteTracking.m[1][1],
tdPose.mDeviceToAbsoluteTracking.m[2][1] };
double hmdZ_Physical[3] = {0.0};
double hmdPosition_Physical[3] = { tdPose.mDeviceToAbsoluteTracking.m[0][3],
tdPose.mDeviceToAbsoluteTracking.m[1][3],
tdPose.mDeviceToAbsoluteTracking.m[2][3] };
// convert position to non-scaled world coordinates
double hmdPosition_NonscaledWorld[3];
hmdPosition_NonscaledWorld[0] = hmdPosition_Physical[0]*physicalX_NonscaledWorld[0] +
hmdPosition_Physical[1]*physicalY_NonscaledWorld[0] +
hmdPosition_Physical[2]*physicalZ_NonscaledWorld[0];
hmdPosition_NonscaledWorld[1] = hmdPosition_Physical[0]*physicalX_NonscaledWorld[1] +
hmdPosition_Physical[1]*physicalY_NonscaledWorld[1] +
hmdPosition_Physical[2]*physicalZ_NonscaledWorld[1];
hmdPosition_NonscaledWorld[2] = hmdPosition_Physical[0]*physicalX_NonscaledWorld[2] +
hmdPosition_Physical[1]*physicalY_NonscaledWorld[2] +
hmdPosition_Physical[2]*physicalZ_NonscaledWorld[2];
// now adjust for scale and translation
double hmdPosition_World[3] = {0.0};
for (int i = 0; i < 3; i++)
{
pos[i] = npos[i]*distance - trans[i];
hmdPosition_World[i] = hmdPosition_NonscaledWorld[i]*this->PhysicalScale - this->PhysicalTranslation[i];
}
// convert axes to world coordinates
double fvright[3]; // final vright
fvright[0] = hvright[0]*vright[0] + hvright[1]*vup[0] - hvright[2]*dop[0];
fvright[1] = hvright[0]*vright[1] + hvright[1]*vup[1] - hvright[2]*dop[1];
fvright[2] = hvright[0]*vright[2] + hvright[1]*vup[2] - hvright[2]*dop[2];
double fvup[3]; // final vup
fvup[0] = hvup[0]*vright[0] + hvup[1]*vup[0] - hvup[2]*dop[0];
fvup[1] = hvup[0]*vright[1] + hvup[1]*vup[1] - hvup[2]*dop[1];
fvup[2] = hvup[0]*vright[2] + hvup[1]*vup[2] - hvup[2]*dop[2];
double fdop[3];
vtkMath::Cross(fvup, fvright, fdop);
cam->SetPosition(pos);
cam->SetFocalPoint(
pos[0] + fdop[0]*distance,
pos[1] + fdop[1]*distance,
pos[2] + fdop[2]*distance);
cam->SetViewUp(fvup);
// convert axes to non-scaled world coordinate system
double hmdX_NonscaledWorld[3] = { hmdX_Physical[0]*physicalX_NonscaledWorld[0] +
hmdX_Physical[1]*physicalY_NonscaledWorld[0] +
hmdX_Physical[2]*physicalZ_NonscaledWorld[0],
hmdX_Physical[0]*physicalX_NonscaledWorld[1] +
hmdX_Physical[1]*physicalY_NonscaledWorld[1] +
hmdX_Physical[2]*physicalZ_NonscaledWorld[1],
hmdX_Physical[0]*physicalX_NonscaledWorld[2] +
hmdX_Physical[1]*physicalY_NonscaledWorld[2] +
hmdX_Physical[2]*physicalZ_NonscaledWorld[2] };
double hmdY_NonscaledWorld[3] = { hmdY_Physical[0]*physicalX_NonscaledWorld[0] +
hmdY_Physical[1]*physicalY_NonscaledWorld[0] +
hmdY_Physical[2]*physicalZ_NonscaledWorld[0],
hmdY_Physical[0]*physicalX_NonscaledWorld[1] +
hmdY_Physical[1]*physicalY_NonscaledWorld[1] +
hmdY_Physical[2]*physicalZ_NonscaledWorld[1],
hmdY_Physical[0]*physicalX_NonscaledWorld[2] +
hmdY_Physical[1]*physicalY_NonscaledWorld[2] +
hmdY_Physical[2]*physicalZ_NonscaledWorld[2] };
double hmdZ_NonscaledWorld[3] = {0.0};
vtkMath::Cross(hmdY_NonscaledWorld, hmdX_NonscaledWorld, hmdZ_NonscaledWorld);
cam->SetPosition(hmdPosition_World);
cam->SetFocalPoint( hmdPosition_World[0] + hmdZ_NonscaledWorld[0]*this->PhysicalScale,
hmdPosition_World[1] + hmdZ_NonscaledWorld[1]*this->PhysicalScale,
hmdPosition_World[2] + hmdZ_NonscaledWorld[2]*this->PhysicalScale );
cam->SetViewUp(hmdY_NonscaledWorld);
ren->UpdateLightsGeometryToFollowCamera();
}
}
......@@ -590,7 +613,6 @@ bool vtkOpenVRRenderWindow::CreateFrameBuffer( int nWidth, int nHeight, Framebuf
// Initialize the rendering window.
void vtkOpenVRRenderWindow::Initialize (void)
{
// Loading the SteamVR Runtime
vr::EVRInitError eError = vr::VRInitError_None;
this->HMD = vr::VR_Init( &eError, vr::VRApplication_Scene );
......@@ -764,3 +786,55 @@ void vtkOpenVRRenderWindow::GetTrackedDevicePose(
*pose = &(this->TrackedDevicePose[idx]);
}
}
// Decompose a physical-to-world matrix into the member variables that drive
// the VR camera: PhysicalTranslation, PhysicalScale, PhysicalViewUp and
// PhysicalViewDirection. Only the x axis scale factor of the matrix is used
// for PhysicalScale (uniform scaling is assumed).
// NOTE(review): the translation is stored with the same sign as the matrix
// column, mirroring GetPhysicalToWorldMatrix — confirm this matches the
// -PhysicalTranslation convention used when positioning the camera.
void vtkOpenVRRenderWindow::SetPhysicalToWorldMatrix(vtkMatrix4x4* matrix)
{
  if (matrix == nullptr)
  {
    return;
  }

  // Extract translation and scale through a helper transform
  vtkNew<vtkTransform> physicalToWorldTransform;
  physicalToWorldTransform->SetMatrix(matrix);
  physicalToWorldTransform->GetPosition(this->PhysicalTranslation);
  double scaleFactors[3] = {0.0};
  physicalToWorldTransform->GetScale(scaleFactors);
  this->PhysicalScale = scaleFactors[0];

  // The view up axis is the second column of the matrix; the view direction
  // points opposite the third column (the camera looks down -z).
  for (int row = 0; row < 3; ++row)
  {
    this->PhysicalViewUp[row] = matrix->GetElement(row, 1);
    this->PhysicalViewDirection[row] = -matrix->GetElement(row, 2);
  }
  vtkMath::Normalize(this->PhysicalViewUp);
  vtkMath::Normalize(this->PhysicalViewDirection);
}
// Assemble the physical-to-world matrix from the member variables
// PhysicalViewDirection, PhysicalViewUp, PhysicalTranslation and
// PhysicalScale. Inverse operation of SetPhysicalToWorldMatrix.
void vtkOpenVRRenderWindow::GetPhysicalToWorldMatrix(vtkMatrix4x4* physicalToWorldMatrix)
{
  if (physicalToWorldMatrix == nullptr)
  {
    return;
  }
  physicalToWorldMatrix->Identity();

  // Basis of the non-scaled world frame expressed in physical coordinates:
  // y is the physical view up, z points opposite the physical view direction,
  // and x completes the right-handed frame (scaling is applied per column).
  double axisZ[3] = { -this->PhysicalViewDirection[0],
                      -this->PhysicalViewDirection[1],
                      -this->PhysicalViewDirection[2] };
  double axisY[3] = { this->PhysicalViewUp[0],
                      this->PhysicalViewUp[1],
                      this->PhysicalViewUp[2] };
  double axisX[3] = {0.0};
  vtkMath::Cross(axisY, axisZ, axisX);

  for (int row = 0; row < 3; ++row)
  {
    physicalToWorldMatrix->SetElement(row, 0, this->PhysicalScale * axisX[row]);
    physicalToWorldMatrix->SetElement(row, 1, this->PhysicalScale * axisY[row]);
    physicalToWorldMatrix->SetElement(row, 2, this->PhysicalScale * axisZ[row]);
    physicalToWorldMatrix->SetElement(row, 3, this->PhysicalTranslation[row]);
  }
}
......@@ -49,6 +49,7 @@ PURPOSE. See the above copyright notice for more information.
#include "vtkEventData.h" // for enums
class vtkCamera;
class vtkMatrix4x4;
class vtkOpenVRModel;
class vtkOpenVROverlay;
class vtkOpenGLVertexBufferObject;
......@@ -80,7 +81,9 @@ public:
//@}
/**
* Update the HMD pose
* Update the HMD pose based on hardware pose and physical to world transform.
* VR camera properties are directly modified based on the physical to world
* transform to simulate \sa PhysicalTranslation, \sa PhysicalScale, etc.
*/
void UpdateHMDMatrixPose();
......@@ -111,7 +114,7 @@ public:
return this->TrackedDeviceToRenderModel[idx]; };
/**
*Get the openVR Render Models
* Get the openVR Render Models
*/
vr::IVRRenderModels * GetOpenVRRenderModels() {
return this->OpenVRRenderModels; };
......@@ -129,7 +132,7 @@ public:
return this->TrackedDevicePose[idx]; };
/**
* Initialize the Vive to World setting and camera settings so
* Initialize the HMD to World setting and camera settings so
that the VR world view most closely matches the view from
the provided camera. This method is useful for initializing
* a VR world from an existing on screen window and camera.
......@@ -140,7 +143,7 @@ public:
//@{
/**
* Control the Vive to World transformations. IN
* Control the HMD to World transformations. In
* some cases users may not want the Y axis to be up
* and these methods allow them to control it.
*/
......@@ -154,6 +157,18 @@ public:
vtkGetMacro(PhysicalScale, double);
//@}
/**
* Set physical to world transform matrix. Members calculated and set from the matrix:
* \sa PhysicalViewDirection, \sa PhysicalViewUp, \sa PhysicalTranslation, \sa PhysicalScale
* The x axis scale is used for \sa PhysicalScale
*/
void SetPhysicalToWorldMatrix(vtkMatrix4x4* matrix);
/**
* Get physical to world transform matrix. Members used to calculate the matrix:
* \sa PhysicalViewDirection, \sa PhysicalViewUp, \sa PhysicalTranslation, \sa PhysicalScale
*/
void GetPhysicalToWorldMatrix(vtkMatrix4x4* matrix);
//@{
/**
* When on the camera will track the HMD position.
......
......@@ -97,7 +97,7 @@ void vtkOpenVRRenderWindowInteractor::ConvertPoseToWorldCoordinates(
{
vtkOpenVRRenderWindow *win =
vtkOpenVRRenderWindow::SafeDownCast(this->RenderWindow);
double distance = win->GetPhysicalScale();
double physicalScale = win->GetPhysicalScale();
double *trans = win->GetPhysicalTranslation();
// Vive to world axes
......@@ -129,7 +129,7 @@ void vtkOpenVRRenderWindowInteractor::ConvertPoseToWorldCoordinates(
// now adjust for scale and translation
for (int i = 0; i < 3; i++)
{
pos[i] = ppos[i]*distance - trans[i];
pos[i] = ppos[i]*physicalScale - trans[i];
}
// convert axes to world coordinates
......
......@@ -96,7 +96,7 @@ void vtkOpenVRRenderer::DeviceRender()
vtkOpenVRRenderWindow *win =
static_cast<vtkOpenVRRenderWindow *>(this->GetRenderWindow());
double distance = win->GetPhysicalScale();
double physicalScale = win->GetPhysicalScale();
double trans[3];
win->GetPhysicalTranslation(trans);
......@@ -113,7 +113,7 @@ void vtkOpenVRRenderer::DeviceRender()
static_cast<vtkTransform *>(this->FloorActor->GetUserTransform())->Identity();
static_cast<vtkTransform *>(this->FloorActor->GetUserTransform())->Translate(-trans[0], -trans[1], -trans[2]);
static_cast<vtkTransform *>(this->FloorActor->GetUserTransform())->Scale(distance, distance, distance);
static_cast<vtkTransform *>(this->FloorActor->GetUserTransform())->Scale(physicalScale, physicalScale, physicalScale);
static_cast<vtkTransform *>(this->FloorActor->GetUserTransform())->Concatenate(rot);
}
this->Superclass::DeviceRender();
......@@ -302,7 +302,7 @@ void vtkOpenVRRenderer::ResetCameraClippingRange( double bounds[6] )
vtkOpenVRRenderWindow *win =
static_cast<vtkOpenVRRenderWindow *>(this->GetRenderWindow());
win->GetPhysicalTranslation(trans);
double distance = win->GetPhysicalScale();
double physicalScale = win->GetPhysicalScale();
range[0] = 0.2; // 20 cm in front of HMD
range[1] = 0.0;
......@@ -323,7 +323,7 @@ void vtkOpenVRRenderer::ResetCameraClippingRange( double bounds[6] )
}
}
range[1] /= distance; // convert to physical scale
range[1] /= physicalScale; // convert to physical scale
range[1] += 3.0; // add 3 meters for room to walk around
// to see transmitters make sure far is at least 10 meters
......@@ -332,7 +332,7 @@ void vtkOpenVRRenderer::ResetCameraClippingRange( double bounds[6] )
range[1] = 10.0;
}
this->ActiveCamera->SetClippingRange( range[0]*distance, range[1]*distance );
this->ActiveCamera->SetClippingRange( range[0]*physicalScale, range[1]*physicalScale );
}
void vtkOpenVRRenderer::PrintSelf(ostream& os, vtkIndent indent)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment