// Mikroskop, DirectShow
#define WIN32_LEAN_AND_MEAN
#define _WIN32_WINNT 0x0500
#include <windows.h>
#include <dbt.h>
#include <streams.h>
#include <commctrl.h>
#include <tchar.h>
#ifndef UNICODE
# include <stdio.h>
#endif
#include <math.h>
EXTERN_C BYTE _BitScanForward(DWORD*Index, DWORD Mask);
#define elemof(x) (sizeof(x)/sizeof(*(x)))
#define nobreak
#define T(x) TEXT(x)
#define WM_FGNOTIFY (WM_USER+1)
#define WM_MARKINSTANCE (WM_USER+2) // answered with nInstance
#define REGPATH T("Software\\h#s\\Kamera")
#define CLASSNAME REGPATH+9 // wie Registry-Pfad
//------------------------------------------------------------------------------
// Global data
//------------------------------------------------------------------------------
int nInstance; // zero-based instance number of this process (bit index; see EnumWindowsProc/AppInit)
HINSTANCE ghInstance;
HWND ghwndMain, ghwndStatus, ghwndOverlay; // main frame, status bar, transparent drawing overlay
HDEVNOTIFY ghDevNotify; // device-interface notification handle (camera hot-plug)
TCHAR sDecimal[2]; // locale decimal separator (one character + NUL)
// Stub for the compiler's floating-point marker symbol (CRT-less build).
EXTERN_C void _cdecl _fltused(){};
// All application state lives in this one global aggregate.
struct{
ICaptureGraphBuilder2 *pBuilder; // capture graph builder
IVideoWindow *pVW; // video renderer's window control (valid while preview graph exists)
IMediaEventEx *pME; // filter-graph event notification (WM_FGNOTIFY)
IAMStreamConfig *pVSC; // capture pin format control
IBaseFilter *pVCap; // the video capture filter
IGraphBuilder *pFg; // the filter graph itself
struct{
SIZE sz; // native video size in pixels (persisted in the registry)
}Pin;
TCHAR Resolution[32]; // unit per pixel, e.g. "0.0462 mm", always decimal point '.'
bool fPreviewGraphBuilt;
bool fPreviewing;
bool fMediaInterleaved; // capture pin uses MEDIATYPE_Interleaved (DV), see getStreamConfig()
IMoniker *pmVideo; // moniker of the currently selected capture device
TCHAR FriendlyName[120]; // display name of the device, shown in the status bar
TCHAR VideoDevice[MAX_PATH]; // moniker display name, persisted in the registry
}g;
int statusGetHeight() {
RECT r;
GetWindowRect(ghwndStatus,&r);
return r.bottom-r.top;
}
// Load format string MsgId from the string table, format the varargs into it
// and show a task-modal error box. Returns the MessageBox() result.
int _cdecl ErrMsg(UINT MsgId,...) {
	TCHAR t[256];
	LoadString(0,MsgId,t,elemof(t));
	TCHAR buf[256];
	va_list va;
	va_start(va, MsgId);
	_vsntprintf(buf,elemof(buf),t,va);
	buf[elemof(buf)-1]=0; // FIX: _vsntprintf does not NUL-terminate on truncation
	va_end(va);
	return MessageBox(ghwndMain,buf,NULL,MB_OK|MB_ICONEXCLAMATION|MB_TASKMODAL);
}
// Release a moniker reference and reset the caller's pointer (idempotent).
void IMonRelease(IMoniker *&pm) {
	IMoniker *tmp = pm;
	pm = 0;
	if (tmp) tmp->Release();
}
// Compute the area available for video: the main window's client rectangle
// minus the status bar (control ID 2, listed in `info` — see
// GetEffectiveClientRect). On return, left/top is the origin and
// right/bottom holds width/height (not the far corner).
static void myVideoRect(RECT*r) {
static const int info[]={1,0,1,2,0};
GetEffectiveClientRect(ghwndMain,r,const_cast<int*>(info));
r->right-=r->left; // convert to width and height
r->bottom-=r->top;
}
// Resize the main window so its client area (minus the status bar) is
// exactly w x h pixels. May recurse exactly once: after the first
// SetWindowPos the non-client size can change (presumably menu-bar wrap —
// TODO confirm), so one correcting pass is allowed.
void ResizeWindow(int w, int h) {
static int nRecurse;
RECT rcW, rcC;
int xExtra, yExtra;
int cyBorder = GetSystemMetrics(SM_CYBORDER);
nRecurse++;
GetWindowRect(ghwndMain, &rcW);
GetClientRect(ghwndMain, &rcC);
// non-client extras = window size minus client size (y also adds the status bar)
xExtra = rcW.right - rcW.left - rcC.right;
yExtra = rcW.bottom - rcW.top - rcC.bottom + cyBorder + statusGetHeight();
rcC.right = w;
rcC.bottom = h;
SetWindowPos(ghwndMain, NULL, 0, 0, rcC.right + xExtra,
rcC.bottom + yExtra, SWP_NOZORDER | SWP_NOMOVE);
// we may need to recurse once. But more than that means the window cannot
// be made the size we want, trying will just stack fault.
if (nRecurse==1 && (rcC.right+xExtra != rcW.right-rcW.left && w > GetSystemMetrics(SM_CXMIN) ||
rcC.bottom+yExtra != rcW.bottom-rcW.top))
ResizeWindow(w,h);
nRecurse--;
}
// build the preview graph!
//
// !!! PLEASE NOTE !!! Some new WDM devices have totally separate capture
// and preview settings. An application that wishes to preview and then
// capture may have to set the preview pin format using IAMStreamConfig on the
// preview pin, and then again on the capture pin to capture with that format.
// In this sample app, there is a separate page to set the settings on the
// capture pin and one for the preview pin. To avoid the user
// having to enter the same settings in 2 dialog boxes, an app can have its own
// UI for choosing a format (the possible formats can be enumerated using
// IAMStreamConfig) and then the app can programmatically call IAMStreamConfig
// to set the format on both pins.
//
// Build (but do not run) the preview graph for the current capture filter.
// Returns true if a preview graph exists afterwards.
bool BuildPreviewGraph() {
// we have one already
if (g.fPreviewGraphBuilt) return true;
// No rebuilding while we're running
if (/*g.fCapturing ||*/ g.fPreviewing) return false;
// We don't have the necessary capture filters
if (!g.pVCap) return false;
// Render the preview pin - even if there is not preview pin, the capture
// graph builder will use a smart tee filter and provide a preview.
// !!! what about latency/buffer issues?
// NOTE that we try to render the interleaved pin before the video pin, because
// if BOTH exist, it's a DV filter and the only way to get the audio is to use
// the interleaved pin. Using the Video pin on a DV filter is only useful if
// you don't want the audio.
// (S_OK == 0, so a non-zero hr below means failure.)
HRESULT hr = g.fMediaInterleaved
? g.pBuilder->RenderStream(&PIN_CATEGORY_PREVIEW,
&MEDIATYPE_Interleaved, g.pVCap, NULL, NULL)
: g.pBuilder->RenderStream(NULL/*&PIN_CATEGORY_PREVIEW*/,
&MEDIATYPE_Video, g.pVCap, NULL, NULL);
if (hr) ErrMsg(16); //"This graph cannot preview!"
// Get the preview window to be a child of our app's window
// This will find the IVideoWindow interface on the renderer. It is
// important to ask the filtergraph for this interface... do NOT use
// ICaptureGraphBuilder2::FindInterface, because the filtergraph needs to
// know we own the window so it can give us display changed messages, etc.
hr = g.pFg->QueryInterface(IID_IVideoWindow, (void **)&g.pVW);
if (hr!=NOERROR) ErrMsg(17); //"This graph cannot preview properly"
else{
g.pVW->put_Owner((OAHWND)ghwndMain); // We own the window now
g.pVW->put_WindowStyle(WS_CHILD); // you are now a child
// forward mouse/keyboard input to the overlay window (rubber-band line)
g.pVW->put_MessageDrain((OAHWND)ghwndOverlay);
// give the preview window all our space but where the status bar is
RECT r;
myVideoRect(&r);
g.pVW->SetWindowPosition(r.left,r.top,r.right,r.bottom); // be this big
g.pVW->put_Visible(OATRUE);
}
// make sure we process events while we're previewing!
hr = g.pFg->QueryInterface(IID_IMediaEventEx, (void **)&g.pME);
if (hr==NOERROR) g.pME->SetNotifyWindow((OAHWND)ghwndMain, WM_FGNOTIFY, 0);
// All done.
g.fPreviewGraphBuilt = TRUE;
return true;
}
// Tear down everything downstream of a given filter
// Depth-first walk over pf's connected pins: for each connection into an
// input pin, recurse into the downstream filter, disconnect both pin ends
// and remove that downstream filter from the graph. pf itself survives.
void NukeDownstream(IBaseFilter *pf) {
IPin *pP, *pTo;
ULONG u;
IEnumPins *pins = NULL;
PIN_INFO pininfo;
HRESULT hr = pf->EnumPins(&pins);
pins->Reset();
while(hr == NOERROR) {
hr = pins->Next(1, &pP, &u);
if(hr == S_OK && pP) {
pP->ConnectedTo(&pTo); // pTo is NULL for an unconnected pin (per IPin::ConnectedTo)
if(pTo) {
hr = pTo->QueryPinInfo(&pininfo);
if(hr == NOERROR) {
if(pininfo.dir == PINDIR_INPUT) { // only follow the downstream (input) side
NukeDownstream(pininfo.pFilter);
g.pFg->Disconnect(pTo);
g.pFg->Disconnect(pP);
g.pFg->RemoveFilter(pininfo.pFilter);
}
pininfo.pFilter->Release(); // QueryPinInfo AddRef'ed the filter
}
pTo->Release();
}
pP->Release();
}
}
if(pins) pins->Release();
}
// Dismantle everything downstream of the capture filter so a different
// graph can be built. The capture filter and the WDM filters upstream of
// it are deliberately kept alive — destroying them would lose every
// capture setting the user has made.
void TearDownGraph() {
	// Detach the renderer from our window first, or we may see
	// odd repaint artifacts afterwards.
	if (g.pVW) {
		g.pVW->put_Owner(NULL);
		g.pVW->put_Visible(OAFALSE);
		g.pVW->Release();
		g.pVW = NULL;
	}
	if (g.pME) {
		g.pME->Release();
		g.pME = NULL;
	}
	// destroy the graph downstream of our capture filter
	if (g.pVCap) NukeDownstream(g.pVCap);
	// potential debug output - what the graph looks like
	// if (g.pFg) DumpGraph(g.pFg, 1);
	g.fPreviewGraphBuilt = FALSE;
}
// Release the graph, the builder and all capture-filter interfaces.
void FreeCapFilters() {
	if (g.pFg)      { g.pFg->Release();      g.pFg = NULL; }
	if (g.pBuilder) { g.pBuilder->Release(); g.pBuilder = NULL; }
	if (g.pVCap)    { g.pVCap->Release();    g.pVCap = NULL; }
	if (g.pVSC)     { g.pVSC->Release();     g.pVSC = NULL; }
}
// Create the capture graph builder (idempotent). True on success.
bool MakeBuilder() {
	if (g.pBuilder) return true; // already created
	HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL,
			CLSCTX_INPROC, IID_ICaptureGraphBuilder2,
			(void **)&g.pBuilder);
	return hr==NOERROR;
}
// Create the filter graph (idempotent). True on success.
bool MakeGraph() {
	if (g.pFg) return true; // already created
	HRESULT hr = CoCreateInstance(CLSID_FilterGraph, NULL,
			CLSCTX_INPROC, IID_IGraphBuilder, (LPVOID *)&g.pFg);
	return hr==NOERROR;
}
// Query (set==false) or set (set==true) the native video size in pixels
// via the capture pin's IAMStreamConfig. Returns true on success; fails
// when g.pVSC is missing or the format is not a VIDEOINFOHEADER (DV).
static bool getVideoSize(SIZE*sz,bool set=false) {
	bool ret=false;
	if (g.pVSC) {
		AM_MEDIA_TYPE *pmt=NULL;
		// FIX: DeleteMediaType was previously called on an uninitialized
		// pointer when GetFormat failed; only free pmt on success now.
		if (!g.pVSC->GetFormat(&pmt)) { // S_OK==0: current/default capture format
			if (pmt->formattype==FORMAT_VideoInfo) { // DV capture does not use a VIDEOINFOHEADER
				BITMAPINFOHEADER*bih=HEADER(pmt->pbFormat);
				if (set) {
					bih->biWidth=sz->cx;
					// preserve the top-down (negative) / bottom-up orientation
					bih->biHeight=bih->biHeight<0?-sz->cy:sz->cy;
					ret=!g.pVSC->SetFormat(pmt);
				}else{
					sz->cx=bih->biWidth;
					sz->cy=abs(bih->biHeight);
					ret=true;
				}
			}
			DeleteMediaType(pmt);
		}
	}
	return ret;
}
// Obtain IAMStreamConfig from the capture pin. The Interleaved (DV) media
// type is tried first, plain Video second; g.fMediaInterleaved records
// which one matched. Returns the HRESULT of the last FindInterface call.
static HRESULT getStreamConfig(IAMStreamConfig **pSC) {
	HRESULT hr = g.pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE,
			&MEDIATYPE_Interleaved, g.pVCap,
			IID_IAMStreamConfig, (void**)pSC);
	g.fMediaInterleaved = !hr;
	if (!hr) return hr;
	return g.pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE,
			&MEDIATYPE_Video, g.pVCap,
			IID_IAMStreamConfig, (void**)pSC);
}
// Read a device moniker's "FriendlyName" property into s (capacity slen
// TCHARs, converted from the BSTR as needed). Returns true on success.
static bool getFriendlyName(IMoniker*pM,PTSTR s, int slen) {
bool ret=false;
IPropertyBag *pBag;
if (!pM->BindToStorage(0,0,IID_IPropertyBag,(void **)&pBag)) { // S_OK==0
VARIANT var;
var.vt = VT_BSTR; // request a BSTR from the property bag
if (!pBag->Read(L"FriendlyName",&var,NULL)) {
#ifdef UNICODE
lstrcpyn(s,var.bstrVal,slen);
#else
WideCharToMultiByte(CP_ACP,0,var.bstrVal,-1,s,slen,NULL,NULL);
#endif
SysFreeString(var.bstrVal); // we own the BSTR returned by Read
ret=true;
}
pBag->Release();
}
return ret;
}
// create the capture filters of the graph. We need to keep them loaded from
// the beginning, so we can set parameters on them and have them remembered
// Returns true on success; on any failure everything created so far is
// torn down again via FreeCapFilters().
bool InitCapFilters() {
HRESULT hr=S_OK;
bool f=MakeBuilder();
if (!f) {
ErrMsg(18); //"Cannot instantiate graph builder"
return false;
}
// First, we need a Video Capture filter, and some interfaces
g.pVCap = NULL;
if (g.pmVideo) {
getFriendlyName(g.pmVideo,g.FriendlyName,elemof(g.FriendlyName));
hr = g.pmVideo->BindToObject(0, 0, IID_IBaseFilter, (void**)&g.pVCap);
}
if (!g.pVCap) {
ErrMsg(19, hr); //"Error %x: Cannot create video capture filter"
goto InitCapFiltersFail;
}
// make a filtergraph, give it to the graph builder and put the video
// capture filter in the graph
f = MakeGraph();
if (!f) {
ErrMsg(20); //"Cannot instantiate filtergraph"
goto InitCapFiltersFail;
}
hr = g.pBuilder->SetFiltergraph(g.pFg);
if (hr!=NOERROR) {
ErrMsg(21); //"Cannot give graph to builder"
goto InitCapFiltersFail;
}
hr = g.pFg->AddFilter(g.pVCap, NULL);
if (hr!=NOERROR) {
ErrMsg(22, hr); //"Error %x: Cannot add vidcap to filtergraph"
goto InitCapFiltersFail;
}
// !!! What if this interface isn't supported?
// we use this interface to set the frame rate and get the capture size
hr=getStreamConfig(&g.pVSC);
if (hr!=NOERROR) ErrMsg(23,hr); // this means we can't set frame rate (non-DV only)
//"Error %x: Cannot find VCapture:IAMStreamConfig"
// push the size restored from the registry to the device,
// then read back what the device actually accepted
getVideoSize(&g.Pin.sz,true);
getVideoSize(&g.Pin.sz);
return true;
InitCapFiltersFail:
FreeCapFilters();
return false;
}
// (Build the graph if needed and) start OR stop the video preview.
// Returns true if the preview ends up in the requested state.
bool StartPreview(bool start=true) {
// way ahead of you
if (g.fPreviewing==start) return true;
if (start) {
if (!g.fPreviewGraphBuilt) BuildPreviewGraph();
if (!g.fPreviewGraphBuilt) return false;
}
// run the graph
IMediaControl *pMC;
HRESULT hr=g.pFg->QueryInterface(IID_IMediaControl,(void **)&pMC);
if (SUCCEEDED(hr)) {
if (start) {
hr = pMC->Run();
if (FAILED(hr)) pMC->Stop(); // stop parts that ran
}else pMC->Stop();
pMC->Release();
}
if (FAILED(hr)) {
ErrMsg(start?24:25, hr); //"Error %x: Cannot run/stop preview graph"
return false;
}
g.fPreviewing = start;
// get rid of menu garbage
if (!start) {
InvalidateRect(ghwndMain,NULL,TRUE);
TearDownGraph(); // graph could prevent dialog working
}
return true;
}
// Let's talk about UI for a minute. There are many programmatic interfaces
// you can use to program a capture filter or related filter to capture the
// way you want it to.... eg: IAMStreamConfig, IAMVideoCompression,
// IAMCrossbar, IAMTVTuner, IAMTVAudio, IAMAnalogVideoDecoder, IAMCameraControl,
// IAMVideoProcAmp, etc.
//
// But you probably want some UI to let the user play with all these settings.
// For new WDM-style capture devices, we offer some default UI you can use.
// The code below shows how to bring up all of the dialog boxes supported
// by any capture filters.
//
// The following code shows you how you can bring up all of the
// dialogs supported by a particular object at once on a big page with lots
// of thumb tabs. You do this by starting with an interface on the object that
// you want, and using ISpecifyPropertyPages to get the whole list, and
// OleCreatePropertyFrame to bring them all up. This way you will get custom
// property pages a filter has, too, that are not one of the standard pages that
// you know about. There are at least 9 objects that may have property pages.
// Your app already has 2 of the object pointers, the video capture filter and
// the audio capture filter (let's call them pVCap and pACap)
// 1. The video capture filter - pVCap
// 2. The video capture filter's capture pin - get this by calling
// FindInterface(&PIN_CATEGORY_CAPTURE, pVCap, IID_IPin, &pX);
// 3. The video capture filter's preview pin - get this by calling
// FindInterface(&PIN_CATEGORY_PREVIEW, pVCap, IID_IPin, &pX);
// 4. The audio capture filter - pACap
// 5. The audio capture filter's capture pin - get this by calling
// FindInterface(&PIN_CATEGORY_CAPTURE, pACap, IID_IPin, &pX);
// 6. The crossbar connected to the video capture filter - get this by calling
// FindInterface(NULL, pVCap, IID_IAMCrossbar, &pX);
// 7. There is a possible second crossbar to control audio - get this by
// looking upstream of the first crossbar like this:
// FindInterface(&LOOK_UPSTREAM_ONLY, pX, IID_IAMCrossbar, &pX2);
// 8. The TV Tuner connected to the video capture filter - get this by calling
// FindInterface(NULL, pVCap, IID_IAMTVTuner, &pX);
// 9. The TV Audio connected to the audio capture filter - get this by calling
// FindInterface(NULL, pACap, IID_IAMTVAudio, &pX);
// 10. We have a helper class, CCrossbar, which makes the crossbar issue less
// confusing. In fact, although not supported here, there may be more than
// two crossbars, arranged in many different ways. An application may not
// wish to have separate dialogs for each crossbar, but instead hide the
// complexity and simply offer the user a list of inputs that can be chosen.
// This list represents all the unique inputs from all the crossbars.
// The crossbar helper class does this and offers that list as #10. It is
// expected that an application will either provide the crossbar dialogs
// above (#6 and #7) OR provide the input list (this #10), but not both.
// That would be confusing because if you select an input using dialog 6 or
// 7 the input list here in #10 won't know about your choice.
//
// Your last choice for UI is to make your own pages, and use the results of
// your custom page to call the interfaces programmatically.
// Enable/disable the property-page menu entries (command IDs 42 and 43,
// handled in OnCommand) according to what the current capture filter and
// its capture pin actually support.
void MakeMenuOptions() {
HMENU hMainMenu=GetMenu(ghwndMain); // Options menu
// flags contain no MF_BYPOSITION, so 42/43 are interpreted as command IDs
EnableMenuItem(hMainMenu,42,MF_GRAYED);
EnableMenuItem(hMainMenu,43,MF_GRAYED);
// don't bother looking for new property pages if the old ones are supported
// or if we don't have a capture filter
if (!g.pVCap) return;
// New WDM devices support new UI and new interfaces.
// Your app can use some default property
// pages for UI if you'd like (like we do here) or if you don't like our
// dialog boxes, feel free to make your own and programmatically set
// the capture options through interfaces like IAMCrossbar, IAMCameraControl
// etc.
// There are 9 objects that might support property pages. Let's go through
// them.
// 1. the video capture filter itself
ISpecifyPropertyPages *pSpec;
if (!g.pVCap->QueryInterface(IID_ISpecifyPropertyPages,(void **)&pSpec)) {
CAUUID cauuid;
if (!pSpec->GetPages(&cauuid)) {
if (cauuid.cElems) EnableMenuItem(hMainMenu,42,MF_ENABLED);
CoTaskMemFree(cauuid.pElems);
}
pSpec->Release();
}
// 2. The video capture capture pin
IAMStreamConfig *pSC;
if (!getStreamConfig(&pSC)) {
if (!pSC->QueryInterface(IID_ISpecifyPropertyPages, (void **)&pSpec)) {
CAUUID cauuid;
if (!pSpec->GetPages(&cauuid)) {
if (cauuid.cElems) EnableMenuItem(hMainMenu,43,MF_ENABLED);
CoTaskMemFree(cauuid.pElems);
}
pSpec->Release();
}
pSC->Release();
}
// !!! anything needed to delete the popup when selecting a new input?
}
// Check the devices we're currently using and make filters for them
// Switch to the capture device identified by pmVideo. Takes its own
// reference on the moniker; if pmVideo equals the current device only the
// status bar text is refreshed.
bool ChooseDevice(IMoniker *pmVideo) {
// they chose a new device. rebuild the graphs
if (g.pmVideo!=pmVideo) {
if (pmVideo) pmVideo->AddRef();
IMonRelease(g.pmVideo);
g.pmVideo = pmVideo;
if (g.fPreviewing) StartPreview(false);
FreeCapFilters();
InitCapFilters();
StartPreview();
MakeMenuOptions(); // the UI choices change per device
}
// Since the GetInfo method failed (or the interface did not exist),
// display the device's friendly name.
SendMessage(ghwndStatus,SB_SETTEXT,1,(LPARAM)g.FriendlyName);
return true;
}
// Select the camera at position i of the "Devices" submenu and move the
// radio check mark there. Returns false if the menu item does not exist.
bool ChooseDevice(int i) {
	HMENU hCamMenu = GetSubMenu(GetMenu(ghwndMain),1);
	MENUITEMINFO mii;
	mii.cbSize = sizeof mii;
	mii.fMask = MIIM_DATA; // item data holds the device moniker
	if (!GetMenuItemInfo(hCamMenu,i,TRUE,&mii)) return false;
	if (!ChooseDevice((IMoniker*)mii.dwItemData)) return false;
	int count = GetMenuItemCount(hCamMenu);
	CheckMenuRadioItem(hCamMenu,0,count-1,i,MF_BYPOSITION);
	return true;
}
// Select the device whose moniker display name equals szVideo (the string
// persisted in the registry). Returns false if the device no longer exists
// or is not present in the Devices menu.
bool ChooseDevice(PCTSTR szVideo) {
#ifdef UNICODE
PCWSTR wszVideo=szVideo;
#else
WCHAR wszVideo[MAX_PATH];
MultiByteToWideChar(0,0,szVideo,-1,wszVideo,elemof(wszVideo));
#endif
IBindCtx *lpBC;
IMoniker *pmVideo=0;
if (!CreateBindCtx(0,&lpBC)) {
DWORD dwEaten;
MkParseDisplayName(lpBC,wszVideo,&dwEaten,&pmVideo);
lpBC->Release();
}
if (!pmVideo) return false; // the video device does not exist (any more)
// Handle the case where the video capture device used for the previous session
// is not available now.
MENUITEMINFO mii;
mii.cbSize=sizeof mii;
mii.fMask=MIIM_DATA;
HMENU hCamMenu=GetSubMenu(GetMenu(ghwndMain),1);
UINT i;
for (i=0; GetMenuItemInfo(hCamMenu,i,TRUE,&mii); i++) {
IMoniker*v=(IMoniker*)mii.dwItemData;
if (v && !v->IsEqual(pmVideo)) goto found; // IsEqual: S_OK (0) means equal
}
pmVideo->Release();
return false; // the parsed video device is not in the list (strange!!)
found:
ChooseDevice(i);
pmVideo->Release();
return true;
}
// Minimal dialog procedure for the About box: any command closes it.
static INT_PTR CALLBACK AboutDlgProc(HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam) {
	if (msg==WM_INITDIALOG) return TRUE;
	if (msg==WM_COMMAND) EndDialog(hwnd,wParam);
	return FALSE;
}
// Fill the "Devices" submenu with all installed video capture devices.
// Every menu item keeps an AddRef'ed IMoniker in its item data; the item
// for the currently selected device gets the radio check mark.
// reenum=false merely empties the menu (used at shutdown to release the
// monikers).
void AddDevicesToMenu(bool reenum=true) {
	HMENU hCamMenu=GetSubMenu(GetMenu(ghwndMain),1); // Devices menu
	MENUITEMINFO mii;
	mii.cbSize=sizeof mii;
	mii.fMask=MIIM_DATA;
	// empty the submenu and release the monikers held in the item data
	while (GetMenuItemInfo(hCamMenu,0,TRUE,&mii)) {
		((IMoniker*)mii.dwItemData)->Release(); // unlock
		DeleteMenu(hCamMenu,0,MF_BYPOSITION); // Clean the sub menu
	}
	if (!reenum) return;
	// enumerate all video capture devices
	ICreateDevEnum *pCreateDevEnum;
	HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
			IID_ICreateDevEnum, (void**)&pCreateDevEnum);
	if (hr!=NOERROR) {
		ErrMsg(27); //"Error Creating Device Enumerator"
		return;
	}
	IEnumMoniker *pEm;
	hr=pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,&pEm,0);
	pCreateDevEnum->Release(); // FIX: the enumerator factory was leaked on every call
	if (hr!=NOERROR) { // S_FALSE: category exists but is empty
		ErrMsg(28); //"Sorry, you have no video capture hardware"
		return;
	}
	pEm->Reset();
	IMoniker *pM;
	mii.fMask|=MIIM_FTYPE|MIIM_STATE|MIIM_ID|MIIM_STRING;
	mii.fType=MFT_STRING|MFT_RADIOCHECK;
	for (UINT uIndex=0;!pEm->Next(1,&pM,NULL);uIndex++) {
		TCHAR FriendlyName[120];
		if (getFriendlyName(pM,FriendlyName,elemof(FriendlyName))) {
			// check the currently selected device (IsEqual: S_OK==0 means equal)
			mii.fState=g.pmVideo&&!g.pmVideo->IsEqual(pM)?MFS_CHECKED:MFS_UNCHECKED;
			mii.wID=16+uIndex; // command IDs 16.., handled in OnCommand()
			mii.dwTypeData=FriendlyName; // (duplicate assignment removed)
			mii.dwItemData=(ULONG_PTR)pM; // menu item keeps the moniker reference
			// append a placeholder, then turn it into the real item
			AppendMenu(hCamMenu,MF_SEPARATOR,0,0);
			SetMenuItemInfo(hCamMenu,uIndex,TRUE,&mii);
		}else pM->Release();
	}
	pEm->Release();
}
static POINT gummi[2]; // rubber-band line endpoints, in camera pixels
static double pixlen;  // length of the rubber-band line, in camera pixels
// Recompute pixlen from the current rubber-band endpoints.
static void CalcPixLen() {
pixlen=_hypot(gummi[1].x-gummi[0].x,gummi[1].y-gummi[0].y);
}
// Format the physical length of the rubber-band line into s: parse the
// numeric factor from g.Resolution (unit per camera pixel, e.g.
// "0.0462 mm"; %n yields the index where the unit suffix begins),
// multiply by pixlen and append the unit. The '.' emitted by printf is
// replaced with the locale decimal separator. s is emptied on parse failure.
static void genLengthString(PTSTR s, int slen) {
double d;
int i;
if (_stscanf(g.Resolution,T("%lg%n"),&d,&i)) {
d*=pixlen;
_sntprintf(s,slen-1,T("%.4g%s"),d,g.Resolution+i);
PTSTR p=_tcschr(s,'.');
if (p) *p=*sDecimal;
}else *s=0;
}
static void UpdateStatusLen() {
TCHAR s[32];
if (pixlen) genLengthString(s,elemof(s));
else *s=0;
SendMessage(ghwndStatus,SB_SETTEXT,0,(LPARAM)s);
}
// Calibration dialog: the user types the real-world length of the current
// rubber-band line into edit #11; the per-pixel resolution derived from it
// is previewed in static #12 and committed to g.Resolution on OK.
// Statics keep the printf template (t) and the candidate resolution
// (hResolution) across messages.
static INT_PTR CALLBACK CalDlgProc(HWND Wnd, UINT Msg, WPARAM wParam, LPARAM lParam) {
static TCHAR t[64],hResolution[elemof(g.Resolution)];
switch (Msg) {
case WM_INITDIALOG: {
TCHAR s[64],n[32];
GetDlgItemText(Wnd,10,t,elemof(t)); // printf template of static #10 (pixel length)
_sntprintf(n,elemof(n),T("%.1f"),pixlen);
PTSTR p=_tcschr(n,'.');
if (p) *p=*sDecimal; // display with locale decimal separator
_sntprintf(s,elemof(s),t,n);
SetDlgItemText(Wnd,10,s);
GetDlgItemText(Wnd,12,t,elemof(t)); // printf template of static #12 (resolution)
genLengthString(s,elemof(s));
SetDlgItemText(Wnd,11,s); // set default; triggers EN_UPDATE which fills static #12
lstrcpyn(hResolution,g.Resolution,elemof(hResolution));
}return TRUE;
case WM_USER: {
}break;
case WM_COMMAND: switch (wParam) {
case IDOK: {
lstrcpyn(g.Resolution,hResolution,elemof(g.Resolution)); // commit
UpdateStatusLen();
}nobreak;
case IDCANCEL: EndDialog(Wnd,wParam); break;
case MAKELONG(11,EN_UPDATE): { // edit #11 changed: recompute candidate resolution
TCHAR s[64],n[32];
GetWindowText((HWND)lParam,n,elemof(n));
PTSTR p=_tcschr(n,*sDecimal);
if (p) *p='.'; // scanf needs '.'
double d;
int i;
if (_stscanf(n,T("%lg%n"),&d,&i)) {
d/=pixlen; // resolution expressed per pixel
_sntprintf(hResolution,elemof(hResolution)-1,T("%.5g%s"),d,n+i);
_sntprintf(s,elemof(s),t,hResolution);
p=_tcschr(s,'.');
if (p) *p=*sDecimal;
SetDlgItemText(Wnd,12,s);
}
}break;
}break;
}
return FALSE;
}
/*----------------------------------------------------------------------------*\
| OnCommand()
| Process all of our WM_COMMAND messages.
| wParam is the command id (HIWORD(wParam) is always 0 here).
\*----------------------------------------------------------------------------*/
LONG_PTR OnCommand(HWND Wnd, WPARAM wParam) {
	switch (wParam) {
	case 9: DialogBox(ghInstance,MAKEINTRESOURCE(wParam),Wnd,AboutDlgProc); break; // About box
	case 2: PostMessage(Wnd,WM_CLOSE,0,0L); break; // Exit
	case 10: { // original size: resize the frame so the video shows 1:1
		ResizeWindow(g.Pin.sz.cx,g.Pin.sz.cy);
	}break;
	case 11: DialogBox(ghInstance,MAKEINTRESOURCE(wParam),Wnd,CalDlgProc); break; // calibration
	// pick which video capture device to use (IDs 16..25, see AddDevicesToMenu)
	case 16:
	case 16+1:
	case 16+2:
	case 16+3:
	case 16+4:
	case 16+5:
	case 16+6:
	case 16+7:
	case 16+8:
	case 16+9:
		ChooseDevice((int)wParam-16);
		break;
	case 42: { // video format dialog (capture filter property pages)
		if (!g.pVCap) break; // FIX: guard — no device selected (item should be grayed, but be safe)
		ISpecifyPropertyPages *pSpec;
		if (!g.pVCap->QueryInterface(IID_ISpecifyPropertyPages,(void **)&pSpec)) {
			CAUUID cauuid;
			if (!pSpec->GetPages(&cauuid)) {
				OleCreatePropertyFrame(ghwndMain, 30, 30, NULL, 1,
					(IUnknown**)&g.pVCap, cauuid.cElems,
					(GUID*)cauuid.pElems, 0, 0, NULL);
				CoTaskMemFree(cauuid.pElems);
			}
			pSpec->Release();
		}
	}break;
	case 43: { // capture pin property pages
		// You can change this pin's output format in these dialogs.
		// If the capture pin is already connected to somebody who's
		// fussy about the connection type, that may prevent using
		// this dialog(!) because the filter it's connected to might not
		// allow reconnecting to a new format. (EG: you switch from RGB
		// to some compressed type, and need to pull in a decoder)
		// I need to tear down the graph downstream of the
		// capture filter before bringing up these dialogs.
		// In any case, the graph must be STOPPED when calling them.
		StartPreview(false); // make sure graph is stopped
		// The capture pin that we are trying to set the format on is connected if
		// one of these variable is set to TRUE. The pin should be disconnected for
		// the dialog to work properly.
		if (g.fPreviewGraphBuilt) {
			DbgLog((LOG_TRACE,1,T("Tear down graph for dialog")));
		}
		ISpecifyPropertyPages *pSpec;
		if (g.pVSC // FIX: guard — getStreamConfig() may have failed, g.pVSC can be NULL
		 && !g.pVSC->QueryInterface(IID_ISpecifyPropertyPages,(void**)&pSpec)) {
			CAUUID cauuid;
			if (!pSpec->GetPages(&cauuid)) {
				if (!OleCreatePropertyFrame(ghwndMain, 30, 30, NULL, 1,
						(IUnknown**)&g.pVSC, cauuid.cElems,
						(GUID*)cauuid.pElems, 0, 0, NULL)) {
					// !!! What if changing output formats couldn't reconnect
					// and the graph is broken? Shouldn't be possible...
					// adopt the possibly changed native size and resize the frame
					if (getVideoSize(&g.Pin.sz)) ResizeWindow(g.Pin.sz.cx,g.Pin.sz.cy);
				}
				CoTaskMemFree(cauuid.pElems);
			}
			pSpec->Release();
		}
		StartPreview();
	}break;
	}
	return 0;
}
// Restore the per-instance settings (device, window placement, resolution,
// video size) from HKCU\Software\h#s\Kamera\<nInstance>. Missing values
// simply leave the current defaults untouched.
static void LoadConfig() {
HKEY key;
if (!RegOpenKeyEx(HKEY_CURRENT_USER,REGPATH,0,KEY_QUERY_VALUE,&key)) {
HKEY k2;
TCHAR keyname[16];
_sntprintf(keyname,elemof(keyname),T("%d"),nInstance);
if (!RegOpenKeyEx(key,keyname,0,KEY_QUERY_VALUE,&k2)) {
DWORD len=sizeof g.VideoDevice;
RegQueryValueEx(k2,T("VideoDevice"),NULL,NULL,(BYTE*)g.VideoDevice,&len);
// merge the saved normal position into the current placement
WINDOWPLACEMENT wp;
wp.length=sizeof wp;
GetWindowPlacement(ghwndMain,&wp);
len=sizeof wp.rcNormalPosition;
RegQueryValueEx(k2,T("WindowPos"),NULL,NULL,(BYTE*)&wp.rcNormalPosition,&len);
SetWindowPlacement(ghwndMain,&wp);
len=sizeof g.Resolution;
RegQueryValueEx(k2,T("Resolution"),NULL,NULL,(BYTE*)g.Resolution,&len);
len=sizeof g.Pin;
RegQueryValueEx(k2,T("Pin"),NULL,NULL,(BYTE*)&g.Pin,&len);
RegCloseKey(k2);
}
RegCloseKey(key);
}
}
// Persist the per-instance settings to HKCU\Software\h#s\Kamera\<nInstance>.
// On first creation the base key gets a human-readable description.
static void SaveConfig() {
HKEY key;
DWORD disp;
if (!RegCreateKeyEx(HKEY_CURRENT_USER,REGPATH,0,NULL,0,KEY_WRITE,NULL,&key,&disp)) {
if (disp==REG_CREATED_NEW_KEY) {
TCHAR desc[64]; // description for whoever reads the registry
int i=LoadString(0,2,desc,elemof(desc));
RegSetValue(key,T(""),REG_SZ,desc,i*sizeof(TCHAR));
}
HKEY k2;
TCHAR keyname[16];
_sntprintf(keyname,elemof(keyname),T("%d"),nInstance);
if (!RegCreateKeyEx(key,keyname,0,NULL,0,KEY_WRITE,NULL,&k2,&disp)) {
RegSetValueEx(k2,T("VideoDevice"),0,REG_SZ,(BYTE*)g.VideoDevice,DWORD((_tcslen(g.VideoDevice)+1)*sizeof(TCHAR)));
// only the normal (restored) position is saved
WINDOWPLACEMENT wp;
wp.length=sizeof(wp);
GetWindowPlacement(ghwndMain,&wp);
RegSetValueEx(k2,T("WindowPos"),0,REG_BINARY,(BYTE*)&wp.rcNormalPosition,sizeof(RECT));
RegSetValueEx(k2,T("Resolution"),0,REG_SZ,(BYTE*)g.Resolution,DWORD((_tcslen(g.Resolution)+1)*sizeof(TCHAR)));
RegSetValueEx(k2,T("Pin"),0,REG_BINARY,(BYTE*)&g.Pin,sizeof g.Pin);
RegCloseKey(k2);
}
RegCloseKey(key);
}
}
// Shutdown path (WM_CLOSE / end of session): stop the preview, remember
// the selected device's moniker display name and persist the settings.
void OnClose() {
	// Unregister device notifications
	if (ghDevNotify && UnregisterDeviceNotification(ghDevNotify)) ghDevNotify=0;
	StartPreview(false);
	FreeCapFilters();
	if (g.pmVideo) {
		WCHAR *name=NULL;
		// the display name is a cryptic device path, not the friendly name
		if (SUCCEEDED(g.pmVideo->GetDisplayName(0,0,&name))
		 && name) {
#ifdef UNICODE
			wcsncpy(g.VideoDevice,name,elemof(g.VideoDevice));
			g.VideoDevice[elemof(g.VideoDevice)-1]=0; // FIX: wcsncpy does not NUL-terminate on truncation
#else
			WideCharToMultiByte(0,0,name,-1,g.VideoDevice,elemof(g.VideoDevice),NULL,NULL);
#endif
			CoTaskMemFree(name);
		}
	}
	SaveConfig();
}
// Draw a straight line segment from (x1,y1) to (x2,y2).
BOOL Line(HDC dc, int x1, int y1, int x2, int y2) {
	const POINT pts[2] = {{x1,y1},{x2,y2}};
	return Polyline(dc,pts,2);
}
// Draw a small cross (11 px wide and high) centered at (x,y).
void kreuz(HDC dc, int x, int y) {
	Line(dc,x,y-5,x,y+5); // vertical bar
	Line(dc,x-5,y,x+5,y); // horizontal bar
}
// Draw a cross at rubber-band endpoint i (0 or 1).
void kreuz(HDC dc, int i) {
	const POINT &p = gummi[i];
	kreuz(dc,p.x,p.y);
}
// Paint the overlay: two end crosses plus the connecting rubber-band line.
// gummi[] is stored in camera pixels; an MM_ANISOTROPIC mapping scales the
// camera coordinate space onto the overlay's current client size.
void DoPaint(HDC dc, RECT*rc) {
/*
if (rc) {
Rectangle(dc,rc->left,rc->top,rc->right,rc->bottom);
Line(dc,rc->left,rc->top,rc->right-1,rc->bottom-1);
Line(dc,rc->left,rc->bottom-1,rc->right-1,rc->top);
}
*/
RECT r;
GetClientRect(ghwndOverlay,&r);
if (gummi[0].x!=gummi[1].x || gummi[0].y!=gummi[1].y) { // anything to draw?
HPEN pen3=CreatePen(PS_SOLID,3,RGB(0,255,0));
HPEN open=SelectPen(dc,pen3);
int mm=SetMapMode(dc,MM_ANISOTROPIC); // camera pixels -> overlay pixels
SetWindowExtEx(dc,g.Pin.sz.cx,g.Pin.sz.cy,NULL);
SetViewportExtEx(dc,r.right,r.bottom,NULL);
kreuz(dc,0);
kreuz(dc,1);
HPEN pen0=CreatePen(PS_SOLID,0,RGB(0,255,0));
SelectPen(dc,pen0);
Polyline(dc,gummi,2);
SelectPen(dc,open); // restore DC state before deleting the pens
DeletePen(pen3);
DeletePen(pen0);
SetMapMode(dc,mm);
}
}
// Window procedure of the transparent overlay: lets the user drag a
// rubber-band measuring line with the left mouse button. During the drag,
// coordinates are overlay-window pixels; on button-up they are converted
// to camera pixels so repaints survive window resizing.
LONG_PTR WINAPI OverlayWndProc(HWND Wnd, UINT Msg, WPARAM wParam, LPARAM lParam) {
switch (Msg) {
case WM_PRINTCLIENT: {
DoPaint((HDC)wParam,NULL);
}return 0;
case WM_PAINT: {
PAINTSTRUCT ps;
BeginPaint(Wnd,&ps);
DoPaint(ps.hdc,&ps.rcPaint);
EndPaint(Wnd,&ps);
}return 0;
case WM_LBUTTONDOWN: { // start a new rubber-band line
HDC dc=GetDC(Wnd);
SendMessage(Wnd,WM_ERASEBKGND,(WPARAM)dc,0); // wipe the previous line
HPEN pen=CreatePen(PS_SOLID,3,RGB(0,255,0));
HPEN open=SelectPen(dc,pen);
gummi[0].x=GET_X_LPARAM(lParam);
gummi[0].y=GET_Y_LPARAM(lParam);
kreuz(dc,0);
SelectPen(dc,open);
DeletePen(pen);
ReleaseDC(ghwndOverlay,dc);
gummi[1]=gummi[0]; // degenerate line until the mouse moves
SetCapture(Wnd);
pixlen=0;
UpdateStatusLen();
}break;
case WM_MOUSEMOVE: if (wParam&MK_LBUTTON && GetCapture()==Wnd) {
HDC dc=GetDC(Wnd);
HPEN pen=CreatePen(PS_SOLID,0,RGB(255,0,0));
HPEN open=SelectPen(dc,pen);
int rop=SetROP2(dc,R2_XORPEN); // on a white background the result looks cyan!
Polyline(dc,gummi,2); // erase (XOR draws twice)
gummi[1].x=GET_X_LPARAM(lParam);
gummi[1].y=GET_Y_LPARAM(lParam);
Polyline(dc,gummi,2); // draw at the new position
SetROP2(dc,rop);
SelectPen(dc,open);
DeletePen(pen);
ReleaseDC(ghwndOverlay,dc);
}break;
case WM_LBUTTONUP: if (GetCapture()==Wnd) {
HDC dc=GetDC(Wnd);
HPEN pen=CreatePen(PS_SOLID,3,RGB(0,255,0));
HPEN open=SelectPen(dc,pen);
gummi[1].x=GET_X_LPARAM(lParam);
gummi[1].y=GET_Y_LPARAM(lParam);
kreuz(dc,1);
SelectPen(dc,open);
DeletePen(pen);
ReleaseDC(ghwndOverlay,dc);
ReleaseCapture();
// convert the rubber-band endpoints to camera pixels for later redraws
RECT r;
GetClientRect(Wnd,&r);
for (int i=0; i<elemof(gummi); i++) {
gummi[i].x=MulDiv(gummi[i].x,g.Pin.sz.cx,r.right);
gummi[i].y=MulDiv(gummi[i].y,g.Pin.sz.cy,r.bottom);
}
CalcPixLen();
UpdateStatusLen();
// calibration (command 11) only makes sense for a non-degenerate line
EnableMenuItem(GetMenu(ghwndMain),11,gummi[1].x!=gummi[0].x || gummi[1].y!=gummi[0].y?MF_ENABLED:MF_GRAYED);
}break;
}
return DefWindowProc(Wnd,Msg,wParam,lParam);
}
// Window procedure of the main frame window.
LONG_PTR WINAPI MainWndProc(HWND Wnd, UINT Msg, WPARAM wParam, LPARAM lParam) {
	switch (Msg) {
	case WM_CREATE: {
		DeleteMenu(GetSubMenu(GetMenu(Wnd),1),0,MF_BYPOSITION); // remove placeholder separator
		ghwndStatus = CreateStatusWindow(
			WS_CHILD|WS_BORDER|WS_VISIBLE|SBARS_SIZEGRIP,
			NULL,
			Wnd,
			2); // control ID 2, see myVideoRect()
		if (!ghwndStatus) return -1; // FIX: -1 aborts window creation; false (0) continued
		static const int parts[]={80,-1}; // part 0: length display, part 1: device name
		SendMessage(ghwndStatus,SB_SETPARTS,elemof(parts),(LPARAM)parts);
		// transparent, color-keyed overlay for the rubber-band measuring line
		ghwndOverlay=CreateWindowEx(
			WS_EX_LAYERED|WS_EX_TRANSPARENT|WS_EX_NOACTIVATE,
			MAKEINTATOM(3),
			NULL,
			WS_VISIBLE|WS_POPUP,
			0,0,0,0,
			Wnd,
			0,ghInstance,0);
		if (!ghwndOverlay) return -1; // FIX: abort window creation
		SetLayeredWindowAttributes(ghwndOverlay,RGB(0,0,0),255*70/100,/*LWA_ALPHA|*/LWA_COLORKEY);
		// Register for device add/remove notifications.
		static const DEV_BROADCAST_DEVICEINTERFACE dbdi={
			sizeof(DEV_BROADCAST_DEVICEINTERFACE),
			DBT_DEVTYP_DEVICEINTERFACE,
			0,
			AM_KSCATEGORY_CAPTURE};
		ghDevNotify=RegisterDeviceNotification(Wnd,(void*)&dbdi,DEVICE_NOTIFY_WINDOW_HANDLE);
	}break;
	case WM_COMMAND: return OnCommand(Wnd,wParam);
	// FIX: answer the instance-number query sent by EnumWindowsProc of other
	// starting instances; without this handler every window answered 0.
	case WM_MARKINSTANCE: return nInstance;
	case WM_DESTROY: {
		DbgTerminate();
		IMonRelease(g.pmVideo);
		AddDevicesToMenu(false); // release the monikers held by the menu
		CoUninitialize();
		PostQuitMessage(0);
	}break;
	case WM_CLOSE: OnClose(); break;
	case WM_ENDSESSION: if (wParam || lParam&ENDSESSION_LOGOFF) OnClose(); break;
	case WM_MOVE:
	case WM_SIZE: {
		// make the preview window fit inside our window, taking up
		// all of our client area except for the status window at the bottom
		if (Msg==WM_SIZE) SendMessage(ghwndStatus,Msg,wParam,lParam);
		RECT r;
		myVideoRect(&r);
		if (Msg==WM_SIZE && g.pVW)
			g.pVW->SetWindowPosition(r.left,r.top,r.right,r.bottom);
		// keep the popup overlay (screen coordinates) exactly over the video
		ClientToScreen(Wnd,(POINT*)&r);
		MoveWindow(ghwndOverlay,r.left,r.top,r.right,r.bottom,TRUE);
	}break;
	case WM_FGNOTIFY:
		// uh-oh, something went wrong while capturing - the filtergraph
		// will send us events like EC_COMPLETE, EC_USERABORT and the one
		// we care about, EC_ERRORABORT.
		if (g.pME) {
			long event;
			LONG_PTR l1, l2; // FIX: GetEvent writes LONG_PTRs; the old (LONG_PTR*)&LONG casts overran the stack on 64-bit builds
			bool bAbort = false;
			while (g.pME->GetEvent(&event,&l1,&l2,0)==S_OK) {
				g.pME->FreeEventParams(event, l1, l2);
				if (event==EC_ERRORABORT) {
					bAbort = true;
					continue;
				}else if (event==EC_DEVICE_LOST) {
					// Check if we have lost a capture filter being used.
					// lParam2 of EC_DEVICE_LOST event == 1 indicates device added
					// == 0 indicates device removed
					if (!l2) {
						IBaseFilter *pf;
						IUnknown *punk = (IUnknown *) l1;
						if (S_OK==punk->QueryInterface(IID_IBaseFilter, (void **) &pf)) {
							if (::IsEqualObject(g.pVCap, pf)) {
								pf->Release();
								bAbort = FALSE;
								ErrMsg(29, (long)l1); //"Stopping Capture (Device Lost). Select New Capture Device (0x%02X)"
								break;
							}
							pf->Release();
						}
					}
				}
			} // end while
			if (bAbort) {
				StartPreview();
				ErrMsg(30,(long)l1); //"ERROR during capture, error code=0x%02X"
			}
		}break;
	case WM_CHAR: switch (wParam) {
		case '+': break; // zoom in (not implemented)
		case '-': break; // zoom out (not implemented)
		case '/': OnCommand(Wnd,10); break; // original size
		case '*': break; // fit to window (not implemented)
	}break;
	case WM_DEVICECHANGE: switch (wParam) { // We are interested in only device arrival & removal events
		case DBT_DEVICEARRIVAL:
		case DBT_DEVICEREMOVECOMPLETE: {
			PDEV_BROADCAST_DEVICEINTERFACE di = (PDEV_BROADCAST_DEVICEINTERFACE) lParam;
			if (di->dbcc_devicetype != DBT_DEVTYP_DEVICEINTERFACE) break;
			if (di->dbcc_classguid != AM_KSCATEGORY_CAPTURE) break; // Check for capture devices.
			AddDevicesToMenu();
		}break;
	}break;
	}
	return DefWindowProc(Wnd,Msg,wParam,lParam);
}
// EnumWindows callback: build a bitmask (in *lParam) of instance numbers
// already taken by other windows of our class. Each window is expected to
// answer WM_MARKINSTANCE with its nInstance (see the WM_MARKINSTANCE
// macro comment; NOTE(review): no handler is visible in this file, so an
// unhandled window would report bit 0 — verify).
static BOOL CALLBACK EnumWindowsProc(HWND Wnd,LPARAM lParam) {
TCHAR n[32];
if (GetClassName(Wnd,n,elemof(n)) && !_tcscmp(n,CLASSNAME)) {
*(DWORD*)lParam|=1<<SendMessage(Wnd,WM_MARKINSTANCE,0,0);
}
return TRUE;
}
bool AppInit() {
CoInitialize(NULL);
DbgInitialise(ghInstance);
WNDCLASS wc={
CS_BYTEALIGNCLIENT|CS_VREDRAW|CS_HREDRAW|CS_DBLCLKS,
MainWndProc,
0,0,
ghInstance,
LoadIcon(ghInstance,MAKEINTRESOURCE(1)),
LoadCursor(NULL,IDC_ARROW),
(HBRUSH)(COLOR_WINDOW+1),
MAKEINTATOM(1),
CLASSNAME};
if (!RegisterClass(&wc)) return false;
wc.lpfnWndProc=OverlayWndProc;
wc.hIcon=0;
wc.hbrBackground=GetStockBrush(BLACK_BRUSH);
wc.lpszClassName=MAKEINTATOM(3);
wc.lpszMenuName=0;
if (!RegisterClass(&wc)) return false;
GetLocaleInfo(LOCALE_USER_DEFAULT,LOCALE_SDECIMAL,sDecimal,elemof(sDecimal));
TCHAR WindowTitle[64];
LoadString(0,1,WindowTitle,elemof(WindowTitle));
DWORD mask=0;
EnumWindows(EnumWindowsProc,(LPARAM)&mask);
if (!_BitScanForward((DWORD*)&nInstance,~mask)) return false; // unglaubliche 32 Instanzen aufgebraucht
if (nInstance) {
TCHAR s[8];
_sntprintf(s,elemof(s),T(" [%d]"),nInstance);
_tcsncat(WindowTitle,s,elemof(WindowTitle));
}
// InitCommonControls();
ghwndMain=CreateWindowEx(0,
CLASSNAME,
WindowTitle,
WS_OVERLAPPEDWINDOW, // Style bits
CW_USEDEFAULT,CW_USEDEFAULT,CW_USEDEFAULT,CW_USEDEFAULT,
NULL,NULL,ghInstance,NULL);
LoadConfig();
ShowWindow(ghwndMain,SW_SHOWDEFAULT);
AddDevicesToMenu();
if (!ChooseDevice(g.VideoDevice)) ChooseDevice(nInstance);
return true;
}
// Custom entry point (build without the C runtime): init, then the
// standard message loop until WM_QUIT.
void WinMainCRTStartup() {
_fltused(); // reference the float marker so the linker keeps it
ghInstance=GetModuleHandle(NULL);
if (!AppInit()) return;
MSG msg;
while (GetMessage(&msg,NULL,0,0)) {
TranslateMessage(&msg);
DispatchMessage(&msg);
}
ExitProcess((UINT)msg.wParam); // exit code from PostQuitMessage
}
// Detected encoding: ANSI (CP1252)