Search Results

Search found 418 results on 17 pages for 'uint'.

Page 6 of 17

  • wglCreateContext in C# failing but not in managed C++

    - by SeeR
    I'm trying to use opengl in C#. I have following code which fails with error 2000 ERROR_INVALID_PIXEL_FORMAT First definitions: [DllImport("user32.dll", CharSet = CharSet.Auto, SetLastError = true, ExactSpelling = true)] public static extern IntPtr GetDC(IntPtr hWnd); [StructLayout(LayoutKind.Sequential)] public struct PIXELFORMATDESCRIPTOR { public void Init() { nSize = (ushort) Marshal.SizeOf(typeof (PIXELFORMATDESCRIPTOR)); nVersion = 1; dwFlags = PFD_FLAGS.PFD_DRAW_TO_WINDOW | PFD_FLAGS.PFD_SUPPORT_OPENGL | PFD_FLAGS.PFD_DOUBLEBUFFER | PFD_FLAGS.PFD_SUPPORT_COMPOSITION; iPixelType = PFD_PIXEL_TYPE.PFD_TYPE_RGBA; cColorBits = 24; cRedBits = cRedShift = cGreenBits = cGreenShift = cBlueBits = cBlueShift = 0; cAlphaBits = cAlphaShift = 0; cAccumBits = cAccumRedBits = cAccumGreenBits = cAccumBlueBits = cAccumAlphaBits = 0; cDepthBits = 32; cStencilBits = cAuxBuffers = 0; iLayerType = PFD_LAYER_TYPES.PFD_MAIN_PLANE; bReserved = 0; dwLayerMask = dwVisibleMask = dwDamageMask = 0; } ushort nSize; ushort nVersion; PFD_FLAGS dwFlags; PFD_PIXEL_TYPE iPixelType; byte cColorBits; byte cRedBits; byte cRedShift; byte cGreenBits; byte cGreenShift; byte cBlueBits; byte cBlueShift; byte cAlphaBits; byte cAlphaShift; byte cAccumBits; byte cAccumRedBits; byte cAccumGreenBits; byte cAccumBlueBits; byte cAccumAlphaBits; byte cDepthBits; byte cStencilBits; byte cAuxBuffers; PFD_LAYER_TYPES iLayerType; byte bReserved; uint dwLayerMask; uint dwVisibleMask; uint dwDamageMask; } [Flags] public enum PFD_FLAGS : uint { PFD_DOUBLEBUFFER = 0x00000001, PFD_STEREO = 0x00000002, PFD_DRAW_TO_WINDOW = 0x00000004, PFD_DRAW_TO_BITMAP = 0x00000008, PFD_SUPPORT_GDI = 0x00000010, PFD_SUPPORT_OPENGL = 0x00000020, PFD_GENERIC_FORMAT = 0x00000040, PFD_NEED_PALETTE = 0x00000080, PFD_NEED_SYSTEM_PALETTE = 0x00000100, PFD_SWAP_EXCHANGE = 0x00000200, PFD_SWAP_COPY = 0x00000400, PFD_SWAP_LAYER_BUFFERS = 0x00000800, PFD_GENERIC_ACCELERATED = 0x00001000, PFD_SUPPORT_DIRECTDRAW = 0x00002000, PFD_DIRECT3D_ACCELERATED = 0x00004000, PFD_SUPPORT_COMPOSITION = 0x00008000, PFD_DEPTH_DONTCARE = 0x20000000, PFD_DOUBLEBUFFER_DONTCARE = 0x40000000, PFD_STEREO_DONTCARE = 0x80000000 } public enum PFD_LAYER_TYPES : byte { PFD_MAIN_PLANE = 0, PFD_OVERLAY_PLANE = 1, PFD_UNDERLAY_PLANE = 255 } public enum PFD_PIXEL_TYPE : byte { PFD_TYPE_RGBA = 0, PFD_TYPE_COLORINDEX = 1 } [DllImport("gdi32.dll", CharSet = CharSet.Auto, SetLastError = true, ExactSpelling = true)] public static extern int ChoosePixelFormat(IntPtr hdc, [In] ref PIXELFORMATDESCRIPTOR ppfd); [DllImport("gdi32.dll", CharSet = CharSet.Auto, SetLastError = true, ExactSpelling = true)] public static extern bool SetPixelFormat(IntPtr hdc, int iPixelFormat, ref PIXELFORMATDESCRIPTOR ppfd); [DllImport("opengl32.dll", CharSet = CharSet.Auto, SetLastError = true, ExactSpelling = true)] public static extern IntPtr wglCreateContext(IntPtr hDC); And now the code that fails: IntPtr dc = Win.GetDC(hwnd); var pixelformatdescriptor = new GL.PIXELFORMATDESCRIPTOR(); pixelformatdescriptor.Init(); var pixelFormat = GL.ChoosePixelFormat(dc, ref pixelformatdescriptor); if(!GL.SetPixelFormat(dc, pixelFormat, ref pixelformatdescriptor)) throw new Win32Exception(Marshal.GetLastWin32Error()); IntPtr hglrc; if((hglrc = GL.wglCreateContext(dc)) == IntPtr.Zero) throw new Win32Exception(Marshal.GetLastWin32Error()); //<----- here I have exception the same code in managed C++ is working HDC dc = GetDC(hWnd); PIXELFORMATDESCRIPTOR pf; pf.nSize = sizeof(PIXELFORMATDESCRIPTOR); pf.nVersion = 1; pf.dwFlags = 
PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER | PFD_SUPPORT_COMPOSITION; pf.cColorBits = 24; pf.cRedBits = pf.cRedShift = pf.cGreenBits = pf.cGreenShift = pf.cBlueBits = pf.cBlueShift = 0; pf.cAlphaBits = pf.cAlphaShift = 0; pf.cAccumBits = pf.cAccumRedBits = pf.cAccumGreenBits = pf.cAccumBlueBits = pf.cAccumAlphaBits = 0; pf.cDepthBits = 32; pf.cStencilBits = pf.cAuxBuffers = 0; pf.iLayerType = PFD_MAIN_PLANE; pf.bReserved = 0; pf.dwLayerMask = pf.dwVisibleMask = pf.dwDamageMask = 0; int ipf = ChoosePixelFormat(dc, &pf); SetPixelFormat(dc, ipf, &pf); HGLRC hglrc = wglCreateContext(dc); I've tried it on Vista 64-bit with an ATI graphics card and on Windows XP 32-bit with Nvidia, with the same result in both cases. I also want to mention that I don't want to use any already-written framework for this. Can anyone show me where the bug in the C# code is that causes the exception?
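
    A minimal diagnostic sketch, not a fix in itself: it assumes the Win and GL wrapper classes from the snippet above and adds one extra gdi32 import (DescribePixelFormat, a standard GDI function that is not part of the original code) so that each step can be checked before wglCreateContext runs. ChoosePixelFormat returns 0 on failure and the original code never checks it, so this at least pins down which call rejects the descriptor.

        // Extra import, used only for diagnostics; not in the original code.
        [DllImport("gdi32.dll", SetLastError = true)]
        public static extern int DescribePixelFormat(IntPtr hdc, int iPixelFormat, uint nBytes, ref GL.PIXELFORMATDESCRIPTOR ppfd);

        void CheckedSetup(IntPtr hwnd)
        {
            IntPtr dc = Win.GetDC(hwnd);
            var pfd = new GL.PIXELFORMATDESCRIPTOR();
            pfd.Init();

            int format = GL.ChoosePixelFormat(dc, ref pfd);
            if (format == 0) // zero means the call failed, not "format #0"
                throw new Win32Exception(Marshal.GetLastWin32Error());

            // Ask GDI what the driver actually selected before committing to it.
            var chosen = new GL.PIXELFORMATDESCRIPTOR();
            DescribePixelFormat(dc, format, (uint)Marshal.SizeOf(typeof(GL.PIXELFORMATDESCRIPTOR)), ref chosen);

            if (!GL.SetPixelFormat(dc, format, ref pfd))
                throw new Win32Exception(Marshal.GetLastWin32Error());

            IntPtr hglrc = GL.wglCreateContext(dc);
            if (hglrc == IntPtr.Zero)
                throw new Win32Exception(Marshal.GetLastWin32Error());
        }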

    Read the article

  • C# - calling ext. DLL function containing Delphi "variant record" parameter

    - by CaldonCZE
    Hello, In external (Delphi-created) DLL I've got the following function that I need to call from C# application. function ReadMsg(handle: longword; var Msg: TRxMsg): longword; stdcall; external 'MyDll.dll' name 'ReadMsg'; The "TRxMsg" type is variant record, defined as follows: TRxMsg = record case TypeMsg: byte of 1: (accept, mask: longword); 2: (SN: string[6]); 3: (rx_rate, tx_rate: word); 4: (rx_status, tx_status, ctl0, ctl1, rflg: byte); end; In order to call the function from C#, I declared auxiliary structure "my9Bytes" containing array of bytes and defined that it should be marshalled as 9 bytes long array (which is exactly the size of the Delphi record). private struct my9Bytes { [MarshalAs(UnmanagedType.ByValArray, ArraySubType = UnmanagedType.U1, SizeConst = 9)] public byte[] data; } Then I declared the imported "ReadMsg" function, using the "my9bytes" struct. [DllImport("MyDll.dll")] private static extern uint ReadMsg(uint handle, ref my9Bytes myMsg); I can call the function with no problem... Then I need to create structure corresponding to the original "TRxMsg" variant record and convert my auxiliary "myMsg" array into this structure. I don't know any C# equivalent of Delphi variant array, so I used inheritance and created the following classes. public abstract class TRxMsg { public byte typeMsg; } public class TRxMsgAcceptMask:TRxMsg { public uint accept, mask; //... } public class TRxMsgSN:TRxMsg { public string SN; //... } public class TRxMsgMRate:TRxMsg { public ushort rx_rate, tx_rate; //... } public class TRxMsgStatus:TRxMsg { public byte rx_status, tx_status, ctl0, ctl1, rflg; //... } Finally I create the appropriate object and initialize it with values manually converted from "myMsg" array (I used BitConverter for this). This does work fine, this solution seems to me a little too complicated, and that it should be possible to do this somehow more directly, without the auxiliary "my9bytes" structures or the inheritance and manual converting of individual values. So I'd like to ask you for a suggestions for the best way to do this. Thanks a lot!
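
    One way to drop both the auxiliary my9Bytes buffer and the manual BitConverter step is to model the Delphi variant record as a C# union with LayoutKind.Explicit, so the same 9 bytes can be read through whichever fields match TypeMsg. The sketch below is an illustration with stated assumptions: the field names come from the Delphi declaration above, the offsets assume the record is byte-packed (which matches the 9-byte size mentioned in the question), and the string[6] case is left out because a Delphi ShortString is a length byte followed by character bytes and is easier to read from a raw byte buffer.

        using System;
        using System.Runtime.InteropServices;

        // 9-byte union mirroring TRxMsg: a 1-byte tag followed by 8 bytes of overlapping payload.
        [StructLayout(LayoutKind.Explicit, Size = 9)]
        struct TRxMsg
        {
            [FieldOffset(0)] public byte TypeMsg;

            // case 1: accept, mask: longword
            [FieldOffset(1)] public uint Accept;
            [FieldOffset(5)] public uint Mask;

            // case 3: rx_rate, tx_rate: word
            [FieldOffset(1)] public ushort RxRate;
            [FieldOffset(3)] public ushort TxRate;

            // case 4: five status bytes
            [FieldOffset(1)] public byte RxStatus;
            [FieldOffset(2)] public byte TxStatus;
            [FieldOffset(3)] public byte Ctl0;
            [FieldOffset(4)] public byte Ctl1;
            [FieldOffset(5)] public byte Rflg;
        }

        class Native
        {
            // Same import as in the question, but taking the union directly instead of a raw buffer.
            [DllImport("MyDll.dll")]
            public static extern uint ReadMsg(uint handle, ref TRxMsg msg);
        }

    After the call returns, switch on msg.TypeMsg and read only the fields that belong to that case.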

    Read the article

  • Assigned numbers to movie clip to plug in formula... not working

    - by Matthew MlgPro Harding
    So I am attempting to space these movie clips evenly in the vertical direction, so I came up with a math formula involving n (the button number), but it's not working. var buttonArray:Array = [ side_banner.btn1, side_banner.btn2, side_banner.btn3, side_banner.btn4]; var buttonCount:uint = buttonArray.length; for (var i:uint=0; i< buttonCount; i++) { buttonArray[i].addEventListener(MouseEvent.CLICK, outputNumber); buttonArray[i].theTrigger = [i + 1]; } function outputNumber(e:MouseEvent):void { trace( e.target.theTrigger); buttonArray[i].y = (((stage.stageHeight - 400)/4)*(e.target.theTrigger)) - ((stage.stageHeight - 400)/4)/2 } But apparently each movie clip doesn't actually have a numerical value, just a numeric name... how can I get the "n" button number to use in my formula? Thanks

    Read the article

  • 1136: Incorrect number of arguments. Expected 0.? AS3 Flash Cs4 (Round 2)

    - by charmaine
    (I have asked this before but I dont think I was direct enough with my question and therefore it did not get resolved so here goes again!) I am working through a book called Foundation Actionscript 3.0 Animation, making things move. I am now on Chapter 9 - Collision Detection. On two lines of my code I get the 1135 error, letting me know that I have an incorrect number of arguments. I have highlighted the two areas in which this occurs with asterisks: package { import flash.display.Sprite; import flash.events.Event; public class Bubbles extends Sprite { private var balls:Array; private var numBalls:Number = 10; private var centerBall:Ball; private var bounce:Number = -1; private var spring:Number = 0.2; public function Bubbles() { init(); } private function init():void { balls = new Array(); ***centerBall = new Ball(100, 0xcccccc);*** addChild(centerBall); centerBall.x = stage.stageWidth / 2; centerBall.y = stage.stageHeight / 2; for(var i:uint = 0; i < numBalls; i++) { ***var ball:Ball = new Ball(Math.random() * 40 + 5, Math.random() * 0xffffff);*** ball.x = Math.random() * stage.stageWidth; ball.y = Math.random() * stage.stageHeight; ball.vx = Math.random() * 6 - 3; ball.vy = Math.random() * 6 - 3; addChild(ball); balls.push(ball); } addEventListener(Event.ENTER_FRAME, onEnterFrame); } private function onEnterFrame(event:Event):void { for(var i:uint = 0; i < numBalls; i++) { var ball:Ball = balls[i]; move(ball); var dx:Number = ball.x - centerBall.x; var dy:Number = ball.y - centerBall.y; var dist:Number = Math.sqrt(dx * dx + dy * dy); var minDist:Number = ball.radius + centerBall.radius; if(dist < minDist) { var angle:Number = Math.atan2(dy, dx); var tx:Number = centerBall.x + Math.cos(angle) * minDist; var ty:Number = centerBall.y + Math.sin(angle) * minDist; ball.vx += (tx - ball.x) * spring; ball.vy += (ty - ball.y) * spring; } } } private function move(ball:Ball):void { ball.x += ball.vx; ball.y += ball.vy; if(ball.x + ball.radius > stage.stageWidth) { ball.x = stage.stageWidth - ball.radius; ball.vx *= bounce; } else if(ball.x - ball.radius < 0) { ball.x = ball.radius; ball.vx *= bounce; } if(ball.y + ball.radius > stage.stageHeight) { ball.y = stage.stageHeight - ball.radius; ball.vy *= bounce; } else if(ball.y - ball.radius < 0) { ball.y = ball.radius; ball.vy *= bounce; } } } } I think this is due to the non-existance of a Ball.as, when reading the tutorial I assumed it meant that I had to create a movie clip of a ball on stage and then export it for actionscript with the class name being Ball, however when flicking back through the book I saw that a Ball.as already existed, stating that I may need to use this again later on in the book, this read: package { import flash.display.Sprite; public class Ball extends Sprite { private var radius:Number; private var color:uint; public var vx:Number=0; public var vy:Number=0; public function Ball(radius:Number=40, color:uint=0xff0000) { this.radius=radius; this.color=color; init(); } public function init():void { graphics.beginFill(color); graphics.drawCircle(0, 0, radius); graphics.endFill(); } } } This managed to stop all the errors appearing however, it did not transmit any of the effects from Bubbles.as it just braught a Red Ball on the center of the stage. How would I alter this code in order to work in favour of Bubbles.as? Please Help! Thanks!

    Read the article

  • Strange error with CreateCompatibleDC

    - by sevaxx
    Maybe this is a foolish question, but I can't see why I cannot get a DC created in the following code: HBITMAP COcrDlg::LoadClippedBitmap(LPCTSTR pathName,UINT maxWidth,UINT maxHeight) { HBITMAP hBmp = (HBITMAP)::LoadImage(NULL, pathName, IMAGE_BITMAP, 0, 0, LR_LOADFROMFILE | LR_CREATEDIBSECTION); if (!hBmp) return NULL; HDC hdc = (HDC)GetDC(); HDC hdcMem = CreateCompatibleDC(hdc); if (!hdcMem) { DWORD err = GetLastError(); } ... ... ... The bitmap hBmp is loaded fine and hdc has a valid value, but the call to CreateCompatibleDC() returns a NULL pointer. Then GetLastError() returns 0! Can anybody guess what's going on here, please? PS: There are no memory allocations or GDI routines called before this one... so I think memory leaks should be ruled out.

    Read the article

  • how to use a class method as a WIN32 application callback method (WINPROC)... Error static struct HI

    - by numerical25
    I am receiving errors and at the same time trying to make this work so please read what I got to say. Thanks.... I am creating a c++ application and majority of the application is encapsulated into a class. That means that my WinProc function is a static class method that I use as my call back method for my win32 application. The problem is that since I created that as a win32 application, every class member I use inside that method has to be static as well. Including my HINSTANCE class which I use inside of it. Now I receive this error error LNK2001: unresolved external symbol "public: static struct HINSTANCE__ I need to figure out how to make this all work and below is my class declaration. My static member static HINSTANCE m_hInst is causing the error I believe. #pragma once #include "stdafx.h" #include "resource.h" #define MAX_LOADSTRING 100 class RenderEngine { protected: int m_width; int m_height; ATOM RegisterEngineClass(); public: static HINSTANCE m_hInst; //<------ This is causing the error HWND m_hWnd; int m_nCmdShow; TCHAR m_szTitle[MAX_LOADSTRING]; // The title bar text TCHAR m_szWindowClass[MAX_LOADSTRING]; // the main window class name bool InitWindow(); bool InitDirectX(); bool InitInstance(); //static functions static LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam); static INT_PTR CALLBACK About(HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam); int Run(); }; Below is the implementation #include "stdafx.h" #include "RenderEngine.h" bool RenderEngine::InitWindow() { RenderEngine::m_hInst = NULL; // Initialize global strings LoadString(m_hInst, IDS_APP_TITLE, m_szTitle, MAX_LOADSTRING); LoadString(m_hInst, IDC_RENDERENGINE, m_szWindowClass, MAX_LOADSTRING); if(!RegisterEngineClass()) { return false; } if(!InitInstance()) { return false; } return true; } ATOM RenderEngine::RegisterEngineClass() { WNDCLASSEX wcex; wcex.cbSize = sizeof(WNDCLASSEX); wcex.style = CS_HREDRAW | CS_VREDRAW; wcex.lpfnWndProc = RenderEngine::WndProc; wcex.cbClsExtra = 0; wcex.cbWndExtra = 0; wcex.hInstance = m_hInst; wcex.hIcon = LoadIcon(m_hInst, MAKEINTRESOURCE(IDI_RENDERENGINE)); wcex.hCursor = LoadCursor(NULL, IDC_ARROW); wcex.hbrBackground = (HBRUSH)(COLOR_WINDOW+1); wcex.lpszMenuName = MAKEINTRESOURCE(IDC_RENDERENGINE); wcex.lpszClassName = m_szWindowClass; wcex.hIconSm = LoadIcon(wcex.hInstance, MAKEINTRESOURCE(IDI_SMALL)); return RegisterClassEx(&wcex); } LRESULT CALLBACK RenderEngine::WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam) { int wmId, wmEvent; PAINTSTRUCT ps; HDC hdc; switch (message) { case WM_COMMAND: wmId = LOWORD(wParam); wmEvent = HIWORD(wParam); // Parse the menu selections: switch (wmId) { case IDM_ABOUT: DialogBox(m_hInst, MAKEINTRESOURCE(IDD_ABOUTBOX), hWnd, About); break; case IDM_EXIT: DestroyWindow(hWnd); break; default: return DefWindowProc(hWnd, message, wParam, lParam); } break; case WM_PAINT: hdc = BeginPaint(hWnd, &ps); // TODO: Add any drawing code here... EndPaint(hWnd, &ps); break; case WM_DESTROY: PostQuitMessage(0); break; default: return DefWindowProc(hWnd, message, wParam, lParam); } return 0; } bool RenderEngine::InitInstance() { m_hWnd = NULL; m_hWnd = CreateWindow(m_szWindowClass, m_szTitle, WS_OVERLAPPEDWINDOW, CW_USEDEFAULT, 0, CW_USEDEFAULT, 0, NULL, NULL, m_hInst, NULL); if (!m_hWnd) { return FALSE; } if(!ShowWindow(m_hWnd, m_nCmdShow)) { return false; } UpdateWindow(m_hWnd); return true; } // Message handler for about box. 
INT_PTR CALLBACK RenderEngine::About(HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam) { UNREFERENCED_PARAMETER(lParam); switch (message) { case WM_INITDIALOG: return (INT_PTR)TRUE; case WM_COMMAND: if (LOWORD(wParam) == IDOK || LOWORD(wParam) == IDCANCEL) { EndDialog(hDlg, LOWORD(wParam)); return (INT_PTR)TRUE; } break; } return (INT_PTR)FALSE; } int RenderEngine::Run() { MSG msg; HACCEL hAccelTable; hAccelTable = LoadAccelerators(m_hInst, MAKEINTRESOURCE(IDC_RENDERENGINE)); // Main message loop: while (GetMessage(&msg, NULL, 0, 0)) { if (!TranslateAccelerator(msg.hwnd, hAccelTable, &msg)) { TranslateMessage(&msg); DispatchMessage(&msg); } } return (int) msg.wParam; } and below is the code being used within the WinMain RenderEngine go; int APIENTRY _tWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPTSTR lpCmdLine, int nCmdShow) { UNREFERENCED_PARAMETER(hPrevInstance); UNREFERENCED_PARAMETER(lpCmdLine); // TODO: Place code here. RenderEngine::m_hInst = hInstance; go.m_nCmdShow = nCmdShow; if(!go.InitWindow()) { return 0; } go.Run(); return 0; } If anything does not make any sense, then I apologize, I am a newb. Thanks for the help!!

    Read the article

  • actionscript display timer

    - by Aaron
    OK, so I'm using really basic code for a small game. I've got a timer set up in one room, but I can't get it to display in the endgame room. Please help? This is the code I used: var gameStartTime:uint; var gameTime:uint; var gameTimeField:TextField; gameTimeField = new TextField(); gameTimeField.x = 900; gameTimeField.y = 50; addChild(gameTimeField); gameStartTime = getTimer(); gameTime = 0; addEventListener(Event.ENTER_FRAME,showTime); function showTime(event:Event) {gameTime = getTimer()-gameStartTime; gameTimeField.text = "Time: "+clockTime(gameTime); } function clockTime(ms:int) { var seconds:int = Math.floor(ms/100); var minutes:int = Math.floor(seconds/60); seconds -= minutes*60; var timeString:String = minutes+":"+String(seconds+100).substr(1,2); return timeString; }

    Read the article

  • Calling assembly in GCC????

    - by rbr200
    static inline uint xchg(volatile unsigned int *addr, unsigned int newval) { uint result; asm volatile("lock; xchgl %0, %1" : "+m" (*addr), "=a" (result) : "1" (newval) : "cc"); return result; } Can someone tell me what this code does, exactly? I have an idea of the parts of this command: "1" (newval) is the input, "=a" is to flush out its previous value and update it, and "m" is for the memory operand, but I am confused about the functionality of this function. What does the "+m" constraint do? Does this function do something like m = a; m = newval; return a?
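
    For readers more at home in managed code: constraint syntax aside, the routine atomically stores newval into *addr and returns whatever value was there before, which is the same contract as Interlocked.Exchange in .NET. A rough C# analogue (illustrative only; the original is C with GCC extended asm, and the signed/unsigned difference is ignored here):

        using System.Threading;

        static class AtomicOps
        {
            // Atomically store newValue and return the previous contents, like the xchg helper above.
            public static int Exchange(ref int location, int newValue)
            {
                return Interlocked.Exchange(ref location, newValue);
            }
        }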

    Read the article

  • my window's handle is unused and cannot be evaluated

    - by numerical25
    I am trying to encapsulate my Win32 application into a class. My problem occurs when trying to initiate my primary window for the application below is my declaration and implementation... I notice the issue within my class method InitInstance(); declaration #pragma once #include "stdafx.h" #include "resource.h" #define MAX_LOADSTRING 100 class RenderEngine { protected: int m_width; int m_height; ATOM RegisterEngineClass(); public: static HINSTANCE m_hInst; HWND m_hWnd; int m_nCmdShow; TCHAR m_szTitle[MAX_LOADSTRING]; // The title bar text TCHAR m_szWindowClass[MAX_LOADSTRING]; // the main window class name bool InitWindow(); bool InitDirectX(); bool InitInstance(); //static functions static LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam); static INT_PTR CALLBACK About(HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam); int Run(); }; implementation #include "stdafx.h" #include "RenderEngine.h" HINSTANCE RenderEngine::m_hInst = NULL; bool RenderEngine::InitWindow() { RenderEngine::m_hInst = NULL; // Initialize global strings LoadString(m_hInst, IDS_APP_TITLE, m_szTitle, MAX_LOADSTRING); LoadString(m_hInst, IDC_RENDERENGINE, m_szWindowClass, MAX_LOADSTRING); if(!RegisterEngineClass()) { return false; } if(!InitInstance()) { return false; } return true; } ATOM RenderEngine::RegisterEngineClass() { WNDCLASSEX wcex; wcex.cbSize = sizeof(WNDCLASSEX); wcex.style = CS_HREDRAW | CS_VREDRAW; wcex.lpfnWndProc = RenderEngine::WndProc; wcex.cbClsExtra = 0; wcex.cbWndExtra = 0; wcex.hInstance = m_hInst; wcex.hIcon = LoadIcon(m_hInst, MAKEINTRESOURCE(IDI_RENDERENGINE)); wcex.hCursor = LoadCursor(NULL, IDC_ARROW); wcex.hbrBackground = (HBRUSH)(COLOR_WINDOW+1); wcex.lpszMenuName = MAKEINTRESOURCE(IDC_RENDERENGINE); wcex.lpszClassName = m_szWindowClass; wcex.hIconSm = LoadIcon(wcex.hInstance, MAKEINTRESOURCE(IDI_SMALL)); return RegisterClassEx(&wcex); } LRESULT CALLBACK RenderEngine::WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam) { int wmId, wmEvent; PAINTSTRUCT ps; HDC hdc; switch (message) { case WM_COMMAND: wmId = LOWORD(wParam); wmEvent = HIWORD(wParam); // Parse the menu selections: switch (wmId) { case IDM_ABOUT: DialogBox(m_hInst, MAKEINTRESOURCE(IDD_ABOUTBOX), hWnd, About); break; case IDM_EXIT: DestroyWindow(hWnd); break; default: return DefWindowProc(hWnd, message, wParam, lParam); } break; case WM_PAINT: hdc = BeginPaint(hWnd, &ps); // TODO: Add any drawing code here... EndPaint(hWnd, &ps); break; case WM_DESTROY: PostQuitMessage(0); break; default: return DefWindowProc(hWnd, message, wParam, lParam); } return 0; } bool RenderEngine::InitInstance() { m_hWnd = NULL;// When I step into my code it says on this line 0x000000 unused = ??? expression cannot be evaluated m_hWnd = CreateWindow(m_szWindowClass, m_szTitle, WS_OVERLAPPEDWINDOW, CW_USEDEFAULT, 0, CW_USEDEFAULT, 0, NULL, NULL, m_hInst, NULL); if (!m_hWnd)// At this point, memory has been allocated unused = ??. It steps over this { return FALSE; } if(!ShowWindow(m_hWnd, m_nCmdShow))// m_nCmdShow = 1 and m_hWnd is still unused and expression {//Still cannot be evaluated. This statement is true. and shuts down. return false; } UpdateWindow(m_hWnd); return true; } // Message handler for about box. 
INT_PTR CALLBACK RenderEngine::About(HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam) { UNREFERENCED_PARAMETER(lParam); switch (message) { case WM_INITDIALOG: return (INT_PTR)TRUE; case WM_COMMAND: if (LOWORD(wParam) == IDOK || LOWORD(wParam) == IDCANCEL) { EndDialog(hDlg, LOWORD(wParam)); return (INT_PTR)TRUE; } break; } return (INT_PTR)FALSE; } int RenderEngine::Run() { MSG msg; HACCEL hAccelTable; hAccelTable = LoadAccelerators(m_hInst, MAKEINTRESOURCE(IDC_RENDERENGINE)); // Main message loop: while (GetMessage(&msg, NULL, 0, 0)) { if (!TranslateAccelerator(msg.hwnd, hAccelTable, &msg)) { TranslateMessage(&msg); DispatchMessage(&msg); } } return (int) msg.wParam; } and my winMain function that calls the class // RenderEngine.cpp : Defines the entry point for the application. #include "stdafx.h" #include "RenderEngine.h" // Global Variables: RenderEngine go; int APIENTRY _tWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPTSTR lpCmdLine, int nCmdShow) { UNREFERENCED_PARAMETER(hPrevInstance); UNREFERENCED_PARAMETER(lpCmdLine); // TODO: Place code here. RenderEngine::m_hInst = hInstance; go.m_nCmdShow = nCmdShow; if(!go.InitWindow()) { return 0; } go.Run(); return 0; }

    Read the article

  • C# Bug or Brain Teaser? Cast working only with Coalesce (??) Operator

    - by Alex
    This is very strange, maybe someone can explain what's happening, or this is a bug (though I tend to think that this is probably just something intricate about C#). The following code throws the error "Cannot implicitly convert type 'uint?' to 'uint'.": public void Test(UInt32? p) { UInt32 x = p; } However, this code works without error: public void Test(UInt32? p) { UInt32 x = p ?? 1; } Huh? Why does this work? Why would the coalesce operator cause implicit conversion of UInt32? (nullable) to UInt32 (non-nullable), while the first error message says that there is no implicit conversion between those types?
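
    For what it's worth, the behaviour is by design rather than a bug: p ?? 1 already has the non-nullable type uint, because the null-coalescing operator unwraps the nullable left operand when the right operand converts to its underlying type, so no uint?-to-uint conversion is ever asked for. Getting a plain uint out of a uint? otherwise always takes an explicit step; a short sketch (the local names a through d are just for illustration):

        public void Test(UInt32? p)
        {
            UInt32 a = p.Value;                // throws InvalidOperationException if p is null
            UInt32 b = p.GetValueOrDefault();  // 0 if p is null
            UInt32 c = p.GetValueOrDefault(1); // 1 if p is null, equivalent to p ?? 1
            UInt32 d = (UInt32)p;              // explicit cast, also throws if p is null
        }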

    Read the article

  • Can anyone tell me where I am making a mistake in this snippet

    - by Solitaire
    public partial class Form1 : Form { [DllImport("coredll.dll")] static extern int SetWindowLong(IntPtr hWnd, int nIndex, int dwNewLong); const int GWL_WNDPROC = -4; public delegate int WindProc(IntPtr hWnd, uint msg, long Wparam, long lparam); public Form1() { InitializeComponent(); WindProc SampleProc = new WindProc (SubclassWndProc); SetWindowLong(this .Handle , GWL_WNDPROC, SampleProc.Method .MethodHandle.Value.ToInt32()); } public int SubclassWndProc(IntPtr hwnd, uint msg, long Wparam, long lparam) { return 1; } This is a sample in which I am trying to take over the window procedure of a form; this is how I do it in C++, and there I get the window proc easily. If I try the same in C# (.NET 3.5) I am unable to get the window proc: after calling the SetWindowLong API the application hangs and pops up a "don't send report" dialog. I have read that this is the way to get the window proc, so please let me know where I am making a mistake.
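
    A reworked sketch of the snippet, under assumptions worth stating: the crash pattern is consistent with (a) passing MethodHandle.Value, which is not a native function pointer, and (b) the delegate being collected because nothing keeps it alive. Declaring the P/Invoke parameter as the delegate type lets the marshaller produce the function pointer, a field keeps the delegate alive, and unhandled messages are forwarded with CallWindowProc (assumed here to be exported from the same DLL; the sketch also assumes a 32-bit process, since 64-bit desktop Windows would need SetWindowLongPtr):

        public partial class Form1 : Form
        {
            // Window-procedure delegate; IntPtr for wParam/lParam rather than long.
            public delegate IntPtr WndProcDelegate(IntPtr hWnd, uint msg, IntPtr wParam, IntPtr lParam);

            // Declaring the last parameter as the delegate type lets the marshaller
            // pass the native function pointer for us.
            [DllImport("coredll.dll", SetLastError = true)]
            static extern IntPtr SetWindowLong(IntPtr hWnd, int nIndex, WndProcDelegate newProc);

            [DllImport("coredll.dll")]
            static extern IntPtr CallWindowProc(IntPtr prevWndProc, IntPtr hWnd, uint msg, IntPtr wParam, IntPtr lParam);

            const int GWL_WNDPROC = -4;

            IntPtr _oldWndProc;
            WndProcDelegate _proc;   // field reference keeps the delegate from being garbage collected

            public Form1()
            {
                InitializeComponent();
                _proc = new WndProcDelegate(SubclassWndProc);
                _oldWndProc = SetWindowLong(this.Handle, GWL_WNDPROC, _proc);
            }

            IntPtr SubclassWndProc(IntPtr hWnd, uint msg, IntPtr wParam, IntPtr lParam)
            {
                // Hand every message back to the original window procedure.
                return CallWindowProc(_oldWndProc, hWnd, msg, wParam, lParam);
            }
        }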

    Read the article

  • StreamSocket to Stream in windows phone 8

    - by Abouzar Nouri
    I am trying to use Bluetooth in WP8 to send an image from my app on one device and show it in my app on another device. The receiver has a StreamSocket object to read the data from, as in the code below: await _dataReader.LoadAsync(4); uint messageLen = (uint)_dataReader.ReadInt32(); await _dataReader.LoadAsync(messageLen); string imageData = _dataReader.ReadString(messageLen); Then I have to save all the received data (the image) on the device and then create a Stream object from the stored data to give to the BitmapSource.SetSource(Stream) function so that I can show this image in my app. var image = new BitmapImage(); image.SetSource(stream); What I want is to skip this double work and directly convert the StreamSocket object (from the Bluetooth connection) to a Stream object to build the BitmapSource image. Is there any way to do this?
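
    If the aim is simply to avoid the temporary file, one option is to read the image bytes straight into memory and hand a MemoryStream to SetSource. The fragment below is a sketch that reuses _dataReader and the length-prefixed framing from the question, and it assumes the sender writes the raw image bytes after the length prefix (rather than a string):

        // Read the 4-byte length prefix, then the image bytes themselves.
        await _dataReader.LoadAsync(4);
        uint messageLen = (uint)_dataReader.ReadInt32();
        await _dataReader.LoadAsync(messageLen);

        byte[] imageBytes = new byte[messageLen];
        _dataReader.ReadBytes(imageBytes);        // raw bytes instead of ReadString

        // Wrap the buffer in a MemoryStream and feed it straight to the BitmapImage;
        // no intermediate file on the device is involved.
        var image = new BitmapImage();
        using (var ms = new MemoryStream(imageBytes))
        {
            image.SetSource(ms);
        }

    Wrapping socket.InputStream with AsStreamForRead (from the WinRT stream extensions, if available on the target framework) is the more direct conversion, but SetSource generally wants the complete, seekable image data, so buffering as above is usually the simpler route.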

    Read the article

  • Win32_PageFileUsage WMI call returning 0 for CurrentUsage

    - by Ryan Duffield
    I am querying for the current page file usage on Windows-based systems. This is meant to run on the .NET Framework, version 2.0 SP1 and has been tested (and works!) on Windows Vista, Windows 7, Windows Server 2003 R2 and Windows Server 2008. On Windows Server 2003 SP2 (original release, not R2) it appears to return 0: using (var query = new ManagementObjectSearcher("SELECT CurrentUsage FROM Win32_PageFileUsage")) { foreach (ManagementBaseObject obj in query.Get()) { uint used = (uint)obj.GetPropertyValue("CurrentUsage"); return used; } } This is simply returning the results from the first row returned for the WMI call. Even when a page file is being used, it returns 0. What causes the result to be 0 when it should be returning a larger value? It may be something specific to the machine in question, and could have nothing to do with the operating system version. I've also tried this with and without elevated privileges with the same results.
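
    One way to narrow down whether the zero comes from the query or from the machine itself is to dump every row and a few more of the documented Win32_PageFileUsage properties instead of only the first row's CurrentUsage; AllocatedBaseSize and PeakUsage should be non-zero whenever a page file is actually configured. A small self-contained sketch:

        using System;
        using System.Management;

        class PageFileDump
        {
            static void Main()
            {
                using (var searcher = new ManagementObjectSearcher(
                    "SELECT Name, AllocatedBaseSize, CurrentUsage, PeakUsage FROM Win32_PageFileUsage"))
                {
                    foreach (ManagementObject pageFile in searcher.Get())
                    {
                        // All sizes are reported in megabytes.
                        Console.WriteLine("{0}: allocated={1} MB, current={2} MB, peak={3} MB",
                            pageFile["Name"],
                            pageFile["AllocatedBaseSize"],
                            pageFile["CurrentUsage"],
                            pageFile["PeakUsage"]);
                    }
                }
            }
        }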

    Read the article

  • Passing User Data with SetTimer

    - by GJ
    I am calling SetTimer in a member function of a class: SetTimer(NULL, 0, 10000, (TIMERPROC) TimerCallBack); where TimerCallBack is: static VOID CALLBACK TimerCallBack(HWND, UINT, UINT, DWORD) Now I need to call one of the methods of the class that started the timer, but since TimerCallBack is static it no longer has access to the class object. I can't find any way to pass an object pointer along with SetTimer so that I can receive it back in the callback function. Is there any other way to achieve this? If it's not supported with SetTimer, how else can I implement it?

    Read the article

  • Reversing a hash function

    - by martani_net
    Hi, I have the following hash function, and I'm trying to work out how to reverse it, so that I can find a key from a hashed value. uint Hash(string s) { uint result = 0; for (int i = 0; i < s.Length; i++) { result = ((result << 5) + result) + s[i]; } return result; } The code is in C#, but I assume it is clear. I am aware that for one hashed value there can be more than one key, but my intent is not to find them all; just one that satisfies the hash function suffices. Any ideas? Thank you.
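
    The loop computes result = result * 33 + s[i] modulo 2^32, i.e. a djb2-style hash with a zero seed. Since any single preimage will do, a brute-force search over a small alphabet is often enough; the sketch below (the lowercase alphabet and the four-character limit are arbitrary choices) finds some string hashing to the target. For longer preimages the multiply-by-33 step can also be inverted exactly, because 33 is odd and therefore invertible modulo 2^32, but brute force keeps the example simple.

        using System;

        class HashPreimage
        {
            static uint Hash(string s)
            {
                uint result = 0;
                for (int i = 0; i < s.Length; i++)
                    result = ((result << 5) + result) + s[i];
                return result;
            }

            // Depth-first search over strings of lowercase letters up to maxLen characters.
            static string FindPreimage(uint target, int maxLen, string prefix = "")
            {
                if (prefix.Length > 0 && Hash(prefix) == target)
                    return prefix;
                if (prefix.Length == maxLen)
                    return null;
                for (char c = 'a'; c <= 'z'; c++)
                {
                    string found = FindPreimage(target, maxLen, prefix + c);
                    if (found != null)
                        return found;
                }
                return null;
            }

            static void Main()
            {
                uint target = Hash("test");
                Console.WriteLine(FindPreimage(target, 4) ?? "no preimage found");
            }
        }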

    Read the article

  • From Binary to Data Structures

    - by Cédric Menzi
    Table of Contents Introduction PE file format and COFF header COFF file header BaseCoffReader Byte4ByteCoffReader UnsafeCoffReader ManagedCoffReader Conclusion History This article is also available on CodeProject Introduction Sometimes, you want to parse well-formed binary data and bring it into your objects to do some dirty stuff with it. In the Windows world most data structures are stored in special binary format. Either we call a WinApi function or we want to read from special files like images, spool files, executables or may be the previously announced Outlook Personal Folders File. Most specifications for these files can be found on the MSDN Libarary: Open Specification In my example, we are going to get the COFF (Common Object File Format) file header from a PE (Portable Executable). The exact specification can be found here: PECOFF PE file format and COFF header Before we start we need to know how this file is formatted. The following figure shows an overview of the Microsoft PE executable format. Source: Microsoft Our goal is to get the PE header. As we can see, the image starts with a MS-DOS 2.0 header with is not important for us. From the documentation we can read "...After the MS DOS stub, at the file offset specified at offset 0x3c, is a 4-byte...". With this information we know our reader has to jump to location 0x3c and read the offset to the signature. The signature is always 4 bytes that ensures that the image is a PE file. The signature is: PE\0\0. To prove this we first seek to the offset 0x3c, read if the file consist the signature. So we need to declare some constants, because we do not want magic numbers.   private const int PeSignatureOffsetLocation = 0x3c; private const int PeSignatureSize = 4; private const string PeSignatureContent = "PE";   Then a method for moving the reader to the correct location to read the offset of signature. With this method we always move the underlining Stream of the BinaryReader to the start location of the PE signature.   private void SeekToPeSignature(BinaryReader br) { // seek to the offset for the PE signagure br.BaseStream.Seek(PeSignatureOffsetLocation, SeekOrigin.Begin); // read the offset int offsetToPeSig = br.ReadInt32(); // seek to the start of the PE signature br.BaseStream.Seek(offsetToPeSig, SeekOrigin.Begin); }   Now, we can check if it is a valid PE image by reading of the next 4 byte contains the content PE.   private bool IsValidPeSignature(BinaryReader br) { // read 4 bytes to get the PE signature byte[] peSigBytes = br.ReadBytes(PeSignatureSize); // convert it to a string and trim \0 at the end of the content string peContent = Encoding.Default.GetString(peSigBytes).TrimEnd('\0'); // check if PE is in the content return peContent.Equals(PeSignatureContent); }   With this basic functionality we have a good base reader class to try the different methods of parsing the COFF file header. 
COFF file header The COFF header has the following structure: Offset Size Field 0 2 Machine 2 2 NumberOfSections 4 4 TimeDateStamp 8 4 PointerToSymbolTable 12 4 NumberOfSymbols 16 2 SizeOfOptionalHeader 18 2 Characteristics If we translate this table to code, we get something like this:   [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)] public struct CoffHeader { public MachineType Machine; public ushort NumberOfSections; public uint TimeDateStamp; public uint PointerToSymbolTable; public uint NumberOfSymbols; public ushort SizeOfOptionalHeader; public Characteristic Characteristics; } BaseCoffReader All readers do the same thing, so we go to the patterns library in our head and see that Strategy pattern or Template method pattern is sticked out in the bookshelf. I have decided to take the template method pattern in this case, because the Parse() should handle the IO for all implementations and the concrete parsing should done in its derived classes.   public CoffHeader Parse() { using (var br = new BinaryReader(File.Open(_fileName, FileMode.Open, FileAccess.Read, FileShare.Read))) { SeekToPeSignature(br); if (!IsValidPeSignature(br)) { throw new BadImageFormatException(); } return ParseInternal(br); } } protected abstract CoffHeader ParseInternal(BinaryReader br);   First we open the BinaryReader, seek to the PE signature then we check if it contains a valid PE signature and rest is done by the derived implementations. Byte4ByteCoffReader The first solution is using the BinaryReader. It is the general way to get the data. We only need to know which order, which data-type and its size. If we read byte for byte we could comment out the first line in the CoffHeader structure, because we have control about the order of the member assignment.   protected override CoffHeader ParseInternal(BinaryReader br) { CoffHeader coff = new CoffHeader(); coff.Machine = (MachineType)br.ReadInt16(); coff.NumberOfSections = (ushort)br.ReadInt16(); coff.TimeDateStamp = br.ReadUInt32(); coff.PointerToSymbolTable = br.ReadUInt32(); coff.NumberOfSymbols = br.ReadUInt32(); coff.SizeOfOptionalHeader = (ushort)br.ReadInt16(); coff.Characteristics = (Characteristic)br.ReadInt16(); return coff; }   If the structure is as short as the COFF header here and the specification will never changed, there is probably no reason to change the strategy. But if a data-type will be changed, a new member will be added or ordering of member will be changed the maintenance costs of this method are very high. UnsafeCoffReader Another way to bring the data into this structure is using a "magically" unsafe trick. As above, we know the layout and order of the data structure. Now, we need the StructLayout attribute, because we have to ensure that the .NET Runtime allocates the structure in the same order as it is specified in the source code. We also need to enable "Allow unsafe code (/unsafe)" in the project's build properties. Then we need to add the following constructor to the CoffHeader structure.   [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)] public struct CoffHeader { public CoffHeader(byte[] data) { unsafe { fixed (byte* packet = &data[0]) { this = *(CoffHeader*)packet; } } } }   The "magic" trick is in the statement: this = *(CoffHeader*)packet;. What happens here? We have a fixed size of data somewhere in the memory and because a struct in C# is a value-type, the assignment operator = copies the whole data of the structure and not only the reference. 
To fill the structure with data, we need to pass the data as bytes into the CoffHeader structure. This can be achieved by reading the exact size of the structure from the PE file.   protected override CoffHeader ParseInternal(BinaryReader br) { return new CoffHeader(br.ReadBytes(Marshal.SizeOf(typeof(CoffHeader)))); }   This solution is the fastest way to parse the data and bring it into the structure, but it is unsafe and it could introduce some security and stability risks. ManagedCoffReader In this solution we are using the same approach of the structure assignment as above. But we need to replace the unsafe part in the constructor with the following managed part:   [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)] public struct CoffHeader { public CoffHeader(byte[] data) { IntPtr coffPtr = IntPtr.Zero; try { int size = Marshal.SizeOf(typeof(CoffHeader)); coffPtr = Marshal.AllocHGlobal(size); Marshal.Copy(data, 0, coffPtr, size); this = (CoffHeader)Marshal.PtrToStructure(coffPtr, typeof(CoffHeader)); } finally { Marshal.FreeHGlobal(coffPtr); } } }     Conclusion We saw that we can parse well-formed binary data to our data structures using different approaches. The first is probably the clearest way, because we know each member and its size and ordering and we have control about the reading the data for each member. But if add member or the structure is going change by some reason, we need to change the reader. The two other solutions use the approach of the structure assignment. In the unsafe implementation we need to compile the project with the /unsafe option. We increase the performance, but we get some security risks.
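
    On newer runtimes (.NET Core 2.1 and later, or wherever the System.Memory package is available) there is a fourth variant, not covered in the article above, that performs the same structure assignment without /unsafe and without AllocHGlobal: MemoryMarshal.Read reinterprets a byte span as the struct, provided CoffHeader stays a blittable value type as declared earlier. A sketch:

        using System.IO;
        using System.Runtime.InteropServices;

        static CoffHeader ReadCoffHeader(BinaryReader br)
        {
            // Read exactly sizeof(CoffHeader) bytes and reinterpret them in place.
            byte[] raw = br.ReadBytes(Marshal.SizeOf(typeof(CoffHeader)));
            return MemoryMarshal.Read<CoffHeader>(raw);
        }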

    Read the article

  • Why doesn't my texture display with this GLSL shader?

    - by Chewy Gumball
    I am trying to display a DXT1 compressed texture on a quad using a VBO and shaders, but I have been unable to get it working. All I get is a black square. I know my texture is uploaded properly because when I use immediate mode without shaders the texture displays fine but I will include that part just in case. Also, when I change the gl_FragColor to something like vec4 (0.0, 1.0, 1.0, 1.0) then I get a nice blue quad so I know that my shader is able to set the colour. It appears to be either the texture is not being bound correctly in the shader or the texture coordinates are not being picked up. However, I can't find the error! What am I doing wrong? I am using OpenTK in C# (not xna). Vertex Shader: void main() { gl_TexCoord[0] = gl_MultiTexCoord0; // Set the position of the current vertex gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex; } Fragment Shader: uniform sampler2D diffuseTexture; void main() { // Set the output color of our current pixel gl_FragColor = texture2D(diffuseTexture, gl_TexCoord[0].st); //gl_FragColor = vec4 (0.0,1.0,1.0,1.0); } Drawing Code: int vb, eb; GL.GenBuffers(1, out vb); GL.GenBuffers(1, out eb); // Position Texture float[] verts = { 0.1f, 0.1f, 0.0f, 0.0f, 0.0f, 1.9f, 0.1f, 0.0f, 1.0f, 0.0f, 1.9f, 1.9f, 0.0f, 1.0f, 1.0f, 0.1f, 1.9f, 0.0f, 0.0f, 1.0f }; uint[] indices = { 0, 1, 2, 0, 2, 3 }; //upload data to the VBO GL.BindBuffer(BufferTarget.ArrayBuffer, vb); GL.BindBuffer(BufferTarget.ElementArrayBuffer, eb); GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(verts.Length * sizeof(float)), verts, BufferUsageHint.StaticDraw); GL.BufferData(BufferTarget.ElementArrayBuffer, (IntPtr)(indices.Length * sizeof(uint)), indices, BufferUsageHint.StaticDraw); //Upload texture int buffer = GL.GenTexture(); GL.BindTexture(TextureTarget.Texture2D, buffer); GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (float)TextureWrapMode.Repeat); GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (float)TextureWrapMode.Repeat); GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (float)TextureMagFilter.Linear); GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (float)TextureMinFilter.Linear); GL.TexEnv(TextureEnvTarget.TextureEnv, TextureEnvParameter.TextureEnvMode, (float)TextureEnvMode.Modulate); GL.CompressedTexImage2D(TextureTarget.Texture2D, 0, texture.format, texture.width, texture.height, 0, texture.data.Length, texture.data); //Draw GL.UseProgram(shaderProgram); GL.EnableClientState(ArrayCap.VertexArray); GL.EnableClientState(ArrayCap.TextureCoordArray); GL.VertexPointer(3, VertexPointerType.Float, 5 * sizeof(float), 0); GL.TexCoordPointer(2, TexCoordPointerType.Float, 5 * sizeof(float), 3); GL.ActiveTexture(TextureUnit.Texture0); GL.Uniform1(GL.GetUniformLocation(shaderProgram, "diffuseTexture"), 0); GL.DrawElements(BeginMode.Triangles, indices.Length, DrawElementsType.UnsignedInt, 0);
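
    One detail worth checking in the draw code above: the last argument of GL.VertexPointer and GL.TexCoordPointer is a byte offset into the bound VBO, not an element index, so the texture-coordinate pointer most likely needs to start 3 * sizeof(float) = 12 bytes in rather than 3 bytes in; with an offset of 3 the shader samples with essentially meaningless coordinates. A corrected sketch of just those two calls (everything else as in the question):

        // Interleaved layout: 3 position floats followed by 2 texcoord floats per vertex.
        int stride = 5 * sizeof(float);

        GL.VertexPointer(3, VertexPointerType.Float, stride, 0);

        // Offsets are measured in bytes: skip the 3 position floats (12 bytes), not "3".
        GL.TexCoordPointer(2, TexCoordPointerType.Float, stride, 3 * sizeof(float));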

    Read the article

  • WMI Remote Process Starting

    - by Goober
    Scenario I've written a WMI Wrapper that seems to be quite sufficient, however whenever I run the code to start a remote process on a server, I see the process name appear in the task manager but the process itself does not start like it should (as in, I don't see the command line log window of the process that prints out what it's doing etc.) The process I am trying to start is just a C# application executable that I have written. Below is my WMI Wrapper Code and the code I am using to start running the process. Question Is the process actually running? - Even if it is only displaying the process name in the task manager and not actually launching the application to the users window? Code To Start The Process IPHostEntry hostEntry = Dns.GetHostEntry("InsertServerName"); WMIWrapper wrapper = new WMIWrapper("Insert User Name", "Insert Password", hostEntry.HostName); List<Process> processes = wrapper.GetProcesses(); foreach (Process process in processes) { if (process.Caption.Equals("MyAppName.exe")) { Console.WriteLine(process.Caption); Console.WriteLine(process.CommandLine); int processId; wrapper.StartProcess("E:\\MyData\\Data\\MyAppName.exe", out processId); Console.WriteLine(processId.ToString()); } } Console.ReadLine(); WMI Wrapper Code using System; using System.Collections.Generic; using System.Management; using System.Runtime.InteropServices; using Common.WMI.Objects; using System.Net; namespace Common.WMIWrapper { public class WMIWrapper : IDisposable { #region Constructor /// <summary> /// Creates a new instance of the wrapper /// </summary> /// <param jobName="username"></param> /// <param jobName="password"></param> /// <param jobName="server"></param> public WMIWrapper(string server) { Initialise(server); } /// <summary> /// Creates a new instance of the wrapper /// </summary> /// <param jobName="username"></param> /// <param jobName="password"></param> /// <param jobName="server"></param> public WMIWrapper(string username, string password, string server) { Initialise(username, password, server); } #endregion #region Destructor /// <summary> /// Clean up unmanaged references /// </summary> ~WMIWrapper() { Dispose(false); } #endregion #region Initialise /// <summary> /// Initialise the WMI Connection (local machine) /// </summary> /// <param name="server"></param> private void Initialise(string server) { m_server = server; // set connection options m_connectOptions = new ConnectionOptions(); IPHostEntry host = Dns.GetHostEntry(Environment.MachineName); } /// <summary> /// Initialise the WMI connection /// </summary> /// <param jobName="username">Username to connect to server with</param> /// <param jobName="password">Password to connect to server with</param> /// <param jobName="server">Server to connect to</param> private void Initialise(string username, string password, string server) { m_server = server; // set connection options m_connectOptions = new ConnectionOptions(); IPHostEntry host = Dns.GetHostEntry(Environment.MachineName); if (host.HostName.Equals(server, StringComparison.OrdinalIgnoreCase)) return; m_connectOptions.Username = username; m_connectOptions.Password = password; m_connectOptions.Impersonation = ImpersonationLevel.Impersonate; m_connectOptions.EnablePrivileges = true; } #endregion /// <summary> /// Return a list of available wmi namespaces /// </summary> /// <returns></returns> public List<String> GetWMINamespaces() { ManagementScope wmiScope = new ManagementScope(String.Format("\\\\{0}\\root", this.Server), this.ConnectionOptions); List<String> 
wmiNamespaceList = new List<String>(); ManagementClass wmiNamespaces = new ManagementClass(wmiScope, new ManagementPath("__namespace"), null); ; foreach (ManagementObject ns in wmiNamespaces.GetInstances()) wmiNamespaceList.Add(ns["Name"].ToString()); return wmiNamespaceList; } /// <summary> /// Return a list of available classes in a namespace /// </summary> /// <param jobName="wmiNameSpace">Namespace to get wmi classes for</param> /// <returns>List of classes in the requested namespace</returns> public List<String> GetWMIClassList(string wmiNameSpace) { ManagementScope wmiScope = new ManagementScope(String.Format("\\\\{0}\\root\\{1}", this.Server, wmiNameSpace), this.ConnectionOptions); List<String> wmiClasses = new List<String>(); ManagementObjectSearcher wmiSearcher = new ManagementObjectSearcher(wmiScope, new WqlObjectQuery("SELECT * FROM meta_Class"), null); foreach (ManagementClass wmiClass in wmiSearcher.Get()) wmiClasses.Add(wmiClass["__CLASS"].ToString()); return wmiClasses; } /// <summary> /// Get a list of wmi properties for the specified class /// </summary> /// <param jobName="wmiNameSpace">WMI Namespace</param> /// <param jobName="wmiClass">WMI Class</param> /// <returns>List of properties for the class</returns> public List<String> GetWMIClassPropertyList(string wmiNameSpace, string wmiClass) { List<String> wmiClassProperties = new List<string>(); ManagementClass managementClass = GetWMIClass(wmiNameSpace, wmiClass); foreach (PropertyData property in managementClass.Properties) wmiClassProperties.Add(property.Name); return wmiClassProperties; } /// <summary> /// Returns a list of methods for the class /// </summary> /// <param jobName="wmiNameSpace"></param> /// <param jobName="wmiClass"></param> /// <returns></returns> public List<String> GetWMIClassMethodList(string wmiNameSpace, string wmiClass) { List<String> wmiClassMethods = new List<string>(); ManagementClass managementClass = GetWMIClass(wmiNameSpace, wmiClass); foreach (MethodData method in managementClass.Methods) wmiClassMethods.Add(method.Name); return wmiClassMethods; } /// <summary> /// Retrieve the specified management class /// </summary> /// <param jobName="wmiNameSpace">Namespace of the class</param> /// <param jobName="wmiClass">Type of the class</param> /// <returns></returns> public ManagementClass GetWMIClass(string wmiNameSpace, string wmiClass) { ManagementScope wmiScope = new ManagementScope(String.Format("\\\\{0}\\root\\{1}", this.Server, wmiNameSpace), this.ConnectionOptions); ManagementClass managementClass = null; ManagementObjectSearcher wmiSearcher = new ManagementObjectSearcher(wmiScope, new WqlObjectQuery(String.Format("SELECT * FROM meta_Class WHERE __CLASS = '{0}'", wmiClass)), null); foreach (ManagementClass wmiObject in wmiSearcher.Get()) managementClass = wmiObject; return managementClass; } /// <summary> /// Get an instance of the specficied class /// </summary> /// <param jobName="wmiNameSpace">Namespace of the classes</param> /// <param jobName="wmiClass">Type of the classes</param> /// <returns>Array of management classes</returns> public ManagementObject[] GetWMIClassObjects(string wmiNameSpace, string wmiClass) { ManagementScope wmiScope = new ManagementScope(String.Format("\\\\{0}\\root\\{1}", this.Server, wmiNameSpace), this.ConnectionOptions); List<ManagementObject> wmiClasses = new List<ManagementObject>(); ManagementObjectSearcher wmiSearcher = new ManagementObjectSearcher(wmiScope, new WqlObjectQuery(String.Format("SELECT * FROM {0}", wmiClass)), null); foreach 
(ManagementObject wmiObject in wmiSearcher.Get()) wmiClasses.Add(wmiObject); return wmiClasses.ToArray(); } /// <summary> /// Get a full list of services /// </summary> /// <returns></returns> public List<Service> GetServices() { return GetService(null); } /// <summary> /// Get a list of services /// </summary> /// <returns></returns> public List<Service> GetService(string name) { ManagementObject[] services = GetWMIClassObjects("CIMV2", "WIN32_Service"); List<Service> serviceList = new List<Service>(); for (int i = 0; i < services.Length; i++) { ManagementObject managementObject = services[i]; Service service = new Service(managementObject); service.Status = (string)managementObject["Status"]; service.Name = (string)managementObject["Name"]; service.DisplayName = (string)managementObject["DisplayName"]; service.PathName = (string)managementObject["PathName"]; service.ProcessId = (uint)managementObject["ProcessId"]; service.Started = (bool)managementObject["Started"]; service.StartMode = (string)managementObject["StartMode"]; service.ServiceType = (string)managementObject["ServiceType"]; service.InstallDate = (string)managementObject["InstallDate"]; service.Description = (string)managementObject["Description"]; service.Caption = (string)managementObject["Caption"]; if (String.IsNullOrEmpty(name) || name.Equals(service.Name, StringComparison.OrdinalIgnoreCase)) serviceList.Add(service); } return serviceList; } /// <summary> /// Get a list of processes /// </summary> /// <returns></returns> public List<Process> GetProcesses() { return GetProcess(null); } /// <summary> /// Get a list of processes /// </summary> /// <returns></returns> public List<Process> GetProcess(uint? processId) { ManagementObject[] processes = GetWMIClassObjects("CIMV2", "WIN32_Process"); List<Process> processList = new List<Process>(); for (int i = 0; i < processes.Length; i++) { ManagementObject managementObject = processes[i]; Process process = new Process(managementObject); process.Priority = (uint)managementObject["Priority"]; process.ProcessId = (uint)managementObject["ProcessId"]; process.Status = (string)managementObject["Status"]; DateTime createDate; if (ConvertFromWmiDate((string)managementObject["CreationDate"], out createDate)) process.CreationDate = createDate.ToString("dd-MMM-yyyy HH:mm:ss"); process.Caption = (string)managementObject["Caption"]; process.CommandLine = (string)managementObject["CommandLine"]; process.Description = (string)managementObject["Description"]; process.ExecutablePath = (string)managementObject["ExecutablePath"]; process.ExecutionState = (string)managementObject["ExecutionState"]; process.MaximumWorkingSetSize = (UInt32?)managementObject ["MaximumWorkingSetSize"]; process.MinimumWorkingSetSize = (UInt32?)managementObject["MinimumWorkingSetSize"]; process.KernelModeTime = (UInt64)managementObject["KernelModeTime"]; process.ThreadCount = (UInt32)managementObject["ThreadCount"]; process.UserModeTime = (UInt64)managementObject["UserModeTime"]; process.VirtualSize = (UInt64)managementObject["VirtualSize"]; process.WorkingSetSize = (UInt64)managementObject["WorkingSetSize"]; if (processId == null || process.ProcessId == processId.Value) processList.Add(process); } return processList; } /// <summary> /// Start the specified process /// </summary> /// <param jobName="commandLine"></param> /// <returns></returns> public bool StartProcess(string command, out int processId) { processId = int.MaxValue; ManagementClass processClass = GetWMIClass("CIMV2", "WIN32_Process"); object[] objectsIn = 
new object[4]; objectsIn[0] = command; processClass.InvokeMethod("Create", objectsIn); if (objectsIn[3] == null) return false; processId = int.Parse(objectsIn[3].ToString()); return true; } /// <summary> /// Schedule a process on the remote machine /// </summary> /// <param name="command"></param> /// <param name="scheduleTime"></param> /// <param name="jobName"></param> /// <returns></returns> public bool ScheduleProcess(string command, DateTime scheduleTime, out string jobName) { jobName = String.Empty; ManagementClass scheduleClass = GetWMIClass("CIMV2", "Win32_ScheduledJob"); object[] objectsIn = new object[7]; objectsIn[0] = command; objectsIn[1] = String.Format("********{0:00}{1:00}{2:00}.000000+060", scheduleTime.Hour, scheduleTime.Minute, scheduleTime.Second); objectsIn[5] = true; scheduleClass.InvokeMethod("Create", objectsIn); if (objectsIn[6] == null) return false; UInt32 scheduleid = (uint)objectsIn[6]; jobName = scheduleid.ToString(); return true; } /// <summary> /// Returns the current time on the remote server /// </summary> /// <returns></returns> public DateTime Now() { ManagementScope wmiScope = new ManagementScope(String.Format("\\\\{0}\\root\\{1}", this.Server, "CIMV2"), this.ConnectionOptions); ManagementClass managementClass = null; ManagementObjectSearcher wmiSearcher = new ManagementObjectSearcher(wmiScope, new WqlObjectQuery(String.Format("SELECT * FROM Win32_LocalTime")), null); DateTime localTime = DateTime.MinValue; foreach (ManagementObject time in wmiSearcher.Get()) { UInt32 day = (UInt32)time["Day"]; UInt32 month = (UInt32)time["Month"]; UInt32 year = (UInt32)time["Year"]; UInt32 hour = (UInt32)time["Hour"]; UInt32 minute = (UInt32)time["Minute"]; UInt32 second = (UInt32)time["Second"]; localTime = new DateTime((int)year, (int)month, (int)day, (int)hour, (int)minute, (int)second); }; return localTime; } /// <summary> /// Converts a wmi date into a proper date /// </summary> /// <param jobName="wmiDate">Wmi formatted date</param> /// <returns>Date time object</returns> private static bool ConvertFromWmiDate(string wmiDate, out DateTime properDate) { properDate = DateTime.MinValue; string properDateString; // check if string is populated if (String.IsNullOrEmpty(wmiDate)) return false; wmiDate = wmiDate.Trim().ToLower().Replace("*", "0"); string[] months = new string[] { "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" }; try { properDateString = String.Format("{0}-{1}-{2} {3}:{4}:{5}.{6}", wmiDate.Substring(6, 2), months[int.Parse(wmiDate.Substring(4, 2)) - 1], wmiDate.Substring(0, 4), wmiDate.Substring(8, 2), wmiDate.Substring(10, 2), wmiDate.Substring(12, 2), wmiDate.Substring(15, 6)); } catch (InvalidCastException) { return false; } catch (ArgumentOutOfRangeException) { return false; } // try and parse the new date if (!DateTime.TryParse(properDateString, out properDate)) return false; // true if conversion successful return true; } private bool m_disposed; #region IDisposable Members /// <summary> /// Managed dispose /// </summary> public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } /// <summary> /// Dispose of managed and unmanaged objects /// </summary> /// <param jobName="disposing"></param> public void Dispose(bool disposing) { if (disposing) { m_connectOptions = null; } } #endregion #region Properties private ConnectionOptions m_connectOptions; /// <summary> /// Gets or sets the management scope /// </summary> private ConnectionOptions ConnectionOptions { get { return m_connectOptions; } set { 
m_connectOptions = value; } } private String m_server; /// <summary> /// Gets or sets the server to connect to /// </summary> public String Server { get { return m_server; } set { m_server = value; } } #endregion } }
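
    Two separate points may help here. First, a process started through Win32_Process.Create on a remote machine runs in a non-interactive session, so even when it starts correctly its console window is never shown on any logged-on desktop; seeing only the name in Task Manager is consistent with the process genuinely running. Second, the Create call is easier to verify with named parameters than with the positional object array used in StartProcess above; a sketch using the wrapper's public GetWMIClass and the executable path from the calling code:

        // Invoke Win32_Process.Create with named in/out parameters so the return
        // code and the new process id are unambiguous. ReturnValue 0 means success.
        ManagementClass processClass = wrapper.GetWMIClass("CIMV2", "WIN32_Process");

        ManagementBaseObject inParams = processClass.GetMethodParameters("Create");
        inParams["CommandLine"] = "E:\\MyData\\Data\\MyAppName.exe";

        ManagementBaseObject outParams = processClass.InvokeMethod("Create", inParams, null);

        uint returnValue = (uint)outParams["ReturnValue"];
        uint processId = (uint)outParams["ProcessId"];
        Console.WriteLine("Create returned {0}, pid {1}", returnValue, processId);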

    Read the article

  • Repaint window problems

    - by nXqd
    #include "stdafx.h" // Mario Headers #include "GameMain.h" #define MAX_LOADSTRING 100 // Global Variables: HINSTANCE hInst; // current instance TCHAR szTitle[MAX_LOADSTRING]; // The title bar text TCHAR szWindowClass[MAX_LOADSTRING]; // the main window class name // Mario global variables ================= CGameMain* gGameMain; HWND hWnd; PAINTSTRUCT ps; // ======================================== // Forward declarations of functions included in this code module: ATOM MyRegisterClass(HINSTANCE hInstance); BOOL InitInstance(HINSTANCE, int); LRESULT CALLBACK WndProc(HWND, UINT, WPARAM, LPARAM); INT_PTR CALLBACK About(HWND, UINT, WPARAM, LPARAM); // My unprocess function ===================================== void OnCreate(HWND hWnd) { } void OnKeyUp(WPARAM wParam) { switch (wParam) { case VK_LEFT: gGameMain->KeyReleased(LEFT); break; case VK_UP: gGameMain->KeyReleased(UP); break; case VK_RIGHT: gGameMain->KeyReleased(RIGHT); break; case VK_DOWN: gGameMain->KeyReleased(DOWN); break; } } void OnKeyDown(HWND hWnd,WPARAM wParam) { switch (wParam) { case VK_LEFT: gGameMain->KeyPressed(LEFT); break; case VK_UP: gGameMain->KeyPressed(UP); break; case VK_RIGHT: gGameMain->KeyPressed(RIGHT); break; case VK_DOWN: gGameMain->KeyPressed(DOWN); break; } } void OnPaint(HWND hWnd) { HDC hdc = BeginPaint(hWnd,&ps); RECT rect; GetClientRect(hWnd,&rect); HDC hdcDouble = CreateCompatibleDC(hdc); HBITMAP hdcBitmap = CreateCompatibleBitmap(hdc,rect.right,rect.bottom); HBITMAP bmOld = (HBITMAP)SelectObject(hdcDouble, hdcBitmap); gGameMain->SetHDC(&hdcDouble); gGameMain->SendMessage(MESSAGE_PAINT); BitBlt(hdc,0,0,rect.right,rect.bottom,hdcDouble,0,0,SRCCOPY); SelectObject(hdcDouble,bmOld); DeleteDC(hdcDouble); DeleteObject(hdcBitmap); DeleteDC(hdc); } void OnDestroy() { gGameMain->isPlaying = false; EndPaint(hWnd,&ps); } // My unprocess function ===================================== ATOM MyRegisterClass(HINSTANCE hInstance) { WNDCLASSEX wcex; wcex.cbSize = sizeof(WNDCLASSEX); wcex.style = CS_HREDRAW | CS_VREDRAW; wcex.lpfnWndProc = WndProc; wcex.cbClsExtra = 0; wcex.cbWndExtra = 0; wcex.hInstance = hInstance; wcex.hIcon = LoadIcon(hInstance, MAKEINTRESOURCE(IDI_GDIMARIO)); wcex.hCursor = LoadCursor(NULL, IDC_ARROW); wcex.hbrBackground = (HBRUSH)(COLOR_WINDOW+1); wcex.lpszMenuName = MAKEINTRESOURCE(IDC_GDIMARIO); wcex.lpszClassName = szWindowClass; wcex.hIconSm = LoadIcon(wcex.hInstance, MAKEINTRESOURCE(IDI_SMALL)); return RegisterClassEx(&wcex); } BOOL InitInstance(HINSTANCE hInstance, int nCmdShow) { hInst = hInstance; // Store instance handle in our global variable hWnd = CreateWindow(szWindowClass, szTitle, WS_OVERLAPPEDWINDOW, CW_USEDEFAULT, CW_USEDEFAULT, WIDTH, HEIGHT, 0, NULL, hInstance, NULL); if (!hWnd) { return FALSE; } // ---------------- Start gdiplus ------------------ GdiplusStartup(&gdiToken,&gdiStartInput,NULL); // ------------------------------------------------- // Init GameMain gGameMain = new CGameMain(); ShowWindow(hWnd, nCmdShow); UpdateWindow(hWnd); return TRUE; } LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam) { int wmId, wmEvent; switch (message) { case WM_COMMAND: wmId = LOWORD(wParam); wmEvent = HIWORD(wParam); // Parse the menu selections: switch (wmId) { case IDM_ABOUT: DialogBox(hInst, MAKEINTRESOURCE(IDD_ABOUTBOX), hWnd, About); break; case IDM_EXIT: DestroyWindow(hWnd); break; default: return DefWindowProc(hWnd, message, wParam, lParam); } break; case WM_KEYDOWN: OnKeyDown(hWnd,wParam); break; case WM_KEYUP: OnKeyUp(wParam); break; case WM_CREATE: 
OnCreate(hWnd); break; case WM_PAINT: OnPaint(hWnd); break; case WM_DESTROY: OnDestroy(); PostQuitMessage(0); break; default: return DefWindowProc(hWnd, message, wParam, lParam); } return 0; } // Message handler for about box. INT_PTR CALLBACK About(HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam) { UNREFERENCED_PARAMETER(lParam); switch (message) { case WM_INITDIALOG: return (INT_PTR)TRUE; case WM_COMMAND: if (LOWORD(wParam) == IDOK || LOWORD(wParam) == IDCANCEL) { EndDialog(hDlg, LOWORD(wParam)); return (INT_PTR)TRUE; } break; } return (INT_PTR)FALSE; } int APIENTRY _tWinMain(HINSTANCE hInstance,HINSTANCE hPrevInstance,LPTSTR lpCmdLine,int nCmdShow) { UNREFERENCED_PARAMETER(hPrevInstance); UNREFERENCED_PARAMETER(lpCmdLine); // TODO: Place code here. MSG msg; HACCEL hAccelTable; // Initialize global strings LoadString(hInstance, IDS_APP_TITLE, szTitle, MAX_LOADSTRING); LoadString(hInstance, IDC_GDIMARIO, szWindowClass, MAX_LOADSTRING); MyRegisterClass(hInstance); // Perform application initialization: if (!InitInstance (hInstance, nCmdShow)) { return FALSE; } hAccelTable = LoadAccelerators(hInstance, MAKEINTRESOURCE(IDC_GDIMARIO)); // Main message loop: // GameLoop PeekMessage(&msg,NULL,0,0,PM_NOREMOVE); while (gGameMain->isPlaying) { while (PeekMessage(&msg,NULL,0,0,PM_REMOVE)) { if (msg.message == WM_QUIT) break; TranslateMessage(&msg); DispatchMessage(&msg); } if (gGameMain->enterNextState) { gGameMain->SendMessage(MESSAGE_ENTER); gGameMain->enterNextState = false; } gGameMain->SendMessage(MESSAGE_UPDATE); InvalidateRect(hWnd,NULL,FALSE); /*if (gGameMain->exitCurrentState) { gGameMain->SendMessage(MESSAGE_EXIT); gGameMain->enterNextState = true; gGameMain->exitCurrentState = false; }*/ ::Sleep(gGameMain->timer); // Do your game stuff here } GdiplusShutdown(gdiToken); // Shut down gdiplus token return (int) msg.wParam; } I use InvalidateRect(hWnd,NULL,FALSE); to repaint the window, but the problem appears when I repaint without any change in the game state. The first time it paints my logo fine, but the second time (when I just call InvalidateRect(hWnd,NULL,FALSE); without gGameMain->SendMessage(MESSAGE_ENTER);, which initializes some variables for painting) the repaint does not come out right. Thanks for reading this :)
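    Whatever the root cause of the repaint issue turns out to be, one thing in the posted handlers is worth fixing first: BeginPaint and EndPaint are not paired inside WM_PAINT. OnPaint ends by calling DeleteDC on the DC returned by BeginPaint, and EndPaint is only issued later from OnDestroy with the stale global PAINTSTRUCT. A minimal sketch of the usual double-buffered handler, assuming the same gGameMain object and MESSAGE_PAINT constant as in the question:

        void OnPaint(HWND hWnd)
        {
            PAINTSTRUCT ps;
            HDC hdc = BeginPaint(hWnd, &ps);

            RECT rect;
            GetClientRect(hWnd, &rect);

            // Back buffer: a memory DC plus a bitmap the size of the client area.
            HDC hdcBack = CreateCompatibleDC(hdc);
            HBITMAP hbmBack = CreateCompatibleBitmap(hdc, rect.right, rect.bottom);
            HBITMAP hbmOld = (HBITMAP)SelectObject(hdcBack, hbmBack);

            // Let the game draw the whole frame into the back buffer.
            gGameMain->SetHDC(&hdcBack);
            gGameMain->SendMessage(MESSAGE_PAINT);

            // Copy the finished frame to the window in one blit.
            BitBlt(hdc, 0, 0, rect.right, rect.bottom, hdcBack, 0, 0, SRCCOPY);

            // Clean up only what we created; the paint DC belongs to BeginPaint/EndPaint.
            SelectObject(hdcBack, hbmOld);
            DeleteObject(hbmBack);
            DeleteDC(hdcBack);

            EndPaint(hWnd, &ps);
        }

    With the paint bookkeeping kept local like this, the global PAINTSTRUCT and the EndPaint call in OnDestroy are no longer needed.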

  • Access violation in DirectX OMSetRenderTargets

    - by IDWMaster
    I receive the following error (Unhandled exception at 0x527DAE81 (d3d11_1sdklayers.dll) in Lesson2.Triangles.exe: 0xC0000005: Access violation reading location 0x00000000) when running the Triangle sample application for DirectX 11 in D3D_FEATURE_LEVEL_9_1. This error occurs at the OMSetRenderTargets function, as shown below, and does not happen if I remove that function from the program (but then, the screen is blue, and does not render the triangle) //// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF //// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO //// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A //// PARTICULAR PURPOSE. //// //// Copyright (c) Microsoft Corporation. All rights reserved #include #include #include "DirectXSample.h" #include "BasicMath.h" #include "BasicReaderWriter.h" using namespace Microsoft::WRL; using namespace Windows::UI::Core; using namespace Windows::Foundation; using namespace Windows::ApplicationModel::Core; using namespace Windows::ApplicationModel::Infrastructure; // This class defines the application as a whole. ref class Direct3DTutorialViewProvider : public IViewProvider { private: CoreWindow^ m_window; ComPtr m_swapChain; ComPtr m_d3dDevice; ComPtr m_d3dDeviceContext; ComPtr m_renderTargetView; public: // This method is called on application launch. void Initialize( _In_ CoreWindow^ window, _In_ CoreApplicationView^ applicationView ) { m_window = window; } // This method is called after Initialize. void Load(_In_ Platform::String^ entryPoint) { } // This method is called after Load. void Run() { // First, create the Direct3D device. // This flag is required in order to enable compatibility with Direct2D. UINT creationFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT; #if defined(_DEBUG) // If the project is in a debug build, enable debugging via SDK Layers with this flag. creationFlags |= D3D11_CREATE_DEVICE_DEBUG; #endif // This array defines the ordering of feature levels that D3D should attempt to create. D3D_FEATURE_LEVEL featureLevels[] = { D3D_FEATURE_LEVEL_11_1, D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, D3D_FEATURE_LEVEL_10_0, D3D_FEATURE_LEVEL_9_3, D3D_FEATURE_LEVEL_9_1 }; ComPtr d3dDevice; ComPtr d3dDeviceContext; DX::ThrowIfFailed( D3D11CreateDevice( nullptr, // specify nullptr to use the default adapter D3D_DRIVER_TYPE_HARDWARE, nullptr, // leave as nullptr if hardware is used creationFlags, // optionally set debug and Direct2D compatibility flags featureLevels, ARRAYSIZE(featureLevels), D3D11_SDK_VERSION, // always set this to D3D11_SDK_VERSION &d3dDevice, nullptr, &d3dDeviceContext ) ); // Retrieve the Direct3D 11.1 interfaces. DX::ThrowIfFailed( d3dDevice.As(&m_d3dDevice) ); DX::ThrowIfFailed( d3dDeviceContext.As(&m_d3dDeviceContext) ); // After the D3D device is created, create additional application resources. CreateWindowSizeDependentResources(); // Create a Basic Reader-Writer class to load data from disk. This class is examined // in the Resource Loading sample. BasicReaderWriter^ reader = ref new BasicReaderWriter(); // Load the raw vertex shader bytecode from disk and create a vertex shader with it. auto vertexShaderBytecode = reader-ReadData("SimpleVertexShader.cso"); ComPtr vertexShader; DX::ThrowIfFailed( m_d3dDevice-CreateVertexShader( vertexShaderBytecode-Data, vertexShaderBytecode-Length, nullptr, &vertexShader ) ); // Create an input layout that matches the layout defined in the vertex shader code. 
// For this lesson, this is simply a float2 vector defining the vertex position. const D3D11_INPUT_ELEMENT_DESC basicVertexLayoutDesc[] = { { "POSITION", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 }, }; ComPtr inputLayout; DX::ThrowIfFailed( m_d3dDevice-CreateInputLayout( basicVertexLayoutDesc, ARRAYSIZE(basicVertexLayoutDesc), vertexShaderBytecode-Data, vertexShaderBytecode-Length, &inputLayout ) ); // Load the raw pixel shader bytecode from disk and create a pixel shader with it. auto pixelShaderBytecode = reader-ReadData("SimplePixelShader.cso"); ComPtr pixelShader; DX::ThrowIfFailed( m_d3dDevice-CreatePixelShader( pixelShaderBytecode-Data, pixelShaderBytecode-Length, nullptr, &pixelShader ) ); // Create vertex and index buffers that define a simple triangle. float3 triangleVertices[] = { float3(-0.5f, -0.5f,13.5f), float3( 0.0f, 0.5f,0), float3( 0.5f, -0.5f,0), }; D3D11_BUFFER_DESC vertexBufferDesc = {0}; vertexBufferDesc.ByteWidth = sizeof(float3) * ARRAYSIZE(triangleVertices); vertexBufferDesc.Usage = D3D11_USAGE_DEFAULT; vertexBufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER; vertexBufferDesc.CPUAccessFlags = 0; vertexBufferDesc.MiscFlags = 0; vertexBufferDesc.StructureByteStride = 0; D3D11_SUBRESOURCE_DATA vertexBufferData; vertexBufferData.pSysMem = triangleVertices; vertexBufferData.SysMemPitch = 0; vertexBufferData.SysMemSlicePitch = 0; ComPtr vertexBuffer; DX::ThrowIfFailed( m_d3dDevice-CreateBuffer( &vertexBufferDesc, &vertexBufferData, &vertexBuffer ) ); // Once all D3D resources are created, configure the application window. // Allow the application to respond when the window size changes. m_window-SizeChanged += ref new TypedEventHandler( this, &Direct3DTutorialViewProvider::OnWindowSizeChanged ); // Specify the cursor type as the standard arrow cursor. m_window-PointerCursor = ref new CoreCursor(CoreCursorType::Arrow, 0); // Activate the application window, making it visible and enabling it to receive events. m_window-Activate(); // Enter the render loop. Note that tailored applications should never exit. while (true) { // Process events incoming to the window. m_window-Dispatcher-ProcessEvents(CoreProcessEventsOption::ProcessAllIfPresent); // Specify the render target we created as the output target. ID3D11RenderTargetView* targets[1] = {m_renderTargetView.Get()}; m_d3dDeviceContext-OMSetRenderTargets( 1, targets, NULL // use no depth stencil ); // Clear the render target to a solid color. const float clearColor[4] = { 0.071f, 0.04f, 0.561f, 1.0f }; //Code fails here m_d3dDeviceContext-ClearRenderTargetView( m_renderTargetView.Get(), clearColor ); m_d3dDeviceContext-IASetInputLayout(inputLayout.Get()); // Set the vertex and index buffers, and specify the way they define geometry. UINT stride = sizeof(float3); UINT offset = 0; m_d3dDeviceContext-IASetVertexBuffers( 0, 1, vertexBuffer.GetAddressOf(), &stride, &offset ); m_d3dDeviceContext-IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST); // Set the vertex and pixel shader stage state. m_d3dDeviceContext-VSSetShader( vertexShader.Get(), nullptr, 0 ); m_d3dDeviceContext-PSSetShader( pixelShader.Get(), nullptr, 0 ); // Draw the cube. m_d3dDeviceContext-Draw(3,0); // Present the rendered image to the window. Because the maximum frame latency is set to 1, // the render loop will generally be throttled to the screen refresh rate, typically around // 60Hz, by sleeping the application on Present until the screen is refreshed. 
DX::ThrowIfFailed( m_swapChain-Present(1, 0) ); } } // This method is called before the application exits. void Uninitialize() { } private: // This method is called whenever the application window size changes. void OnWindowSizeChanged( _In_ CoreWindow^ sender, _In_ WindowSizeChangedEventArgs^ args ) { m_renderTargetView = nullptr; CreateWindowSizeDependentResources(); } // This method creates all application resources that depend on // the application window size. It is called at app initialization, // and whenever the application window size changes. void CreateWindowSizeDependentResources() { if (m_swapChain != nullptr) { // If the swap chain already exists, resize it. DX::ThrowIfFailed( m_swapChain-ResizeBuffers( 2, 0, 0, DXGI_FORMAT_R8G8B8A8_UNORM, 0 ) ); } else { // If the swap chain does not exist, create it. DXGI_SWAP_CHAIN_DESC1 swapChainDesc = {0}; swapChainDesc.Stereo = false; swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT; swapChainDesc.Scaling = DXGI_SCALING_NONE; swapChainDesc.Flags = 0; // Use automatic sizing. swapChainDesc.Width = 0; swapChainDesc.Height = 0; // This is the most common swap chain format. swapChainDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; // Don't use multi-sampling. swapChainDesc.SampleDesc.Count = 1; swapChainDesc.SampleDesc.Quality = 0; // Use two buffers to enable flip effect. swapChainDesc.BufferCount = 2; // We recommend using this swap effect for all applications. swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL; // Once the swap chain description is configured, it must be // created on the same adapter as the existing D3D Device. // First, retrieve the underlying DXGI Device from the D3D Device. ComPtr dxgiDevice; DX::ThrowIfFailed( m_d3dDevice.As(&dxgiDevice) ); // Ensure that DXGI does not queue more than one frame at a time. This both reduces // latency and ensures that the application will only render after each VSync, minimizing // power consumption. DX::ThrowIfFailed( dxgiDevice-SetMaximumFrameLatency(1) ); // Next, get the parent factory from the DXGI Device. ComPtr dxgiAdapter; DX::ThrowIfFailed( dxgiDevice-GetAdapter(&dxgiAdapter) ); ComPtr dxgiFactory; DX::ThrowIfFailed( dxgiAdapter-GetParent( __uuidof(IDXGIFactory2), &dxgiFactory ) ); // Finally, create the swap chain. DX::ThrowIfFailed( dxgiFactory-CreateSwapChainForImmersiveWindow( m_d3dDevice.Get(), DX::GetIUnknown(m_window), &swapChainDesc, nullptr, // allow on all displays &m_swapChain ) ); } // Once the swap chain is created, create a render target view. This will // allow Direct3D to render graphics to the window. ComPtr backBuffer; DX::ThrowIfFailed( m_swapChain-GetBuffer( 0, __uuidof(ID3D11Texture2D), &backBuffer ) ); DX::ThrowIfFailed( m_d3dDevice-CreateRenderTargetView( backBuffer.Get(), nullptr, &m_renderTargetView ) ); // After the render target view is created, specify that the viewport, // which describes what portion of the window to draw to, should cover // the entire window. D3D11_TEXTURE2D_DESC backBufferDesc = {0}; backBuffer-GetDesc(&backBufferDesc); D3D11_VIEWPORT viewport; viewport.TopLeftX = 0.0f; viewport.TopLeftY = 0.0f; viewport.Width = static_cast(backBufferDesc.Width); viewport.Height = static_cast(backBufferDesc.Height); viewport.MinDepth = D3D11_MIN_DEPTH; viewport.MaxDepth = D3D11_MAX_DEPTH; m_d3dDeviceContext-RSSetViewports(1, &viewport); } }; // This class defines how to create the custom View Provider defined above. 
ref class Direct3DTutorialViewProviderFactory : IViewProviderFactory { public: IViewProvider^ CreateViewProvider() { return ref new Direct3DTutorialViewProvider(); } }; [Platform::MTAThread] int main(array^) { auto viewProviderFactory = ref new Direct3DTutorialViewProviderFactory(); Windows::ApplicationModel::Core::CoreApplication::Run(viewProviderFactory); return 0; }
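    Since the fault is an access violation reading address 0x00000000, something inside that call is dereferencing a null pointer, and it is worth confirming whether the null is the render target view or the context before suspecting feature level 9_1 itself. A small defensive check, not part of the original sample, written against the member names used above:

        // Defensive sketch (not in the original sample): verify the pointers that
        // OMSetRenderTargets will touch. If either is null, the failure is in the
        // swap chain / render-target-view creation path, not in the bind call.
        ID3D11RenderTargetView* targets[1] = { m_renderTargetView.Get() };
        if (m_d3dDeviceContext.Get() == nullptr || targets[0] == nullptr)
        {
            OutputDebugStringW(L"Render target view or device context is null\n");
        }
        else
        {
            m_d3dDeviceContext->OMSetRenderTargets(1, targets, nullptr);
        }

    If both pointers come back non-null, the next step would be to read the debug-layer messages in the Output window just before the fault, since D3D11_CREATE_DEVICE_DEBUG is already requested in debug builds.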

  • Problem with Freetype and OpenGL

    - by Morgan
    Hey, i'm having a weird issue with drawing text in openGL loaded with the Freetype 2 library. Here is a screenshot of what I'm seeing. http://img203.imageshack.us/img203/3316/freetypeweird.png Here are my code bits for loading and rendering my text. class Font { Font(const String& filename) { if (FT_New_Face(Font::ftLibrary, "arial.ttf", 0, &mFace)) { cout << "UH OH!" << endl; } FT_Set_Char_Size(mFace, 16 * 64, 16 * 64, 72, 72); } Glyph* GetGlyph(const unsigned char ch) { if(FT_Load_Char(mFace, ch, FT_LOAD_RENDER)) cout << "OUCH" << endl; FT_Glyph glyph; if(FT_Get_Glyph( mFace->glyph, &glyph )) cout << "OUCH" << endl; FT_BitmapGlyph bitmap_glyph = (FT_BitmapGlyph)glyph; Glyph* thisGlyph = new Glyph; thisGlyph->buffer = bitmap_glyph->bitmap.buffer; thisGlyph->width = bitmap_glyph->bitmap.width; thisGlyph->height = bitmap_glyph->bitmap.rows; return thisGlyph; } }; The relevant glyph information (width, height, buffer) is stored in the following struct struct Glyph { GLubyte* buffer; Uint width; Uint height; }; And finally, to render it, I have this class called RenderFont. class RenderFont { RenderFont(Font* font) { mTextureIds = new GLuint[128]; mFirstDisplayListId=glGenLists(128); glGenTextures( 128, mTextureIds ); for(unsigned char i=0;i<128;i++) { MakeDisplayList(font, i); } } void MakeDisplayList(Font* font, unsigned char ch) { Glyph* glyph = font->GetGlyph(ch); glBindTexture( GL_TEXTURE_2D, mTextureIds[ch]); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, glyph->width, glyph->height, 0, GL_ALPHA, GL_UNSIGNED_BYTE, glyph->buffer); glNewList(mFirstDisplayListId+ch,GL_COMPILE); glBindTexture(GL_TEXTURE_2D, mTextureIds[ch]); glBegin(GL_QUADS); glTexCoord2d(0,1); glVertex2f(0,glyph->height); glTexCoord2d(0,0); glVertex2f(0,0); glTexCoord2d(1,0); glVertex2f(glyph->width,0); glTexCoord2d(1,1); glVertex2f(glyph->width,glyph->height); glEnd(); glTranslatef(16, 0, 0); glEndList(); } void Draw(const String& text, Uint size, const TransformComponent* transform, const Color32* color) { glEnable(GL_TEXTURE_2D); glEnable(GL_BLEND); glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); glTranslatef(100, 250, 0.0f); glListBase(mFirstDisplayListId); glCallLists(text.length(), GL_UNSIGNED_BYTE, text.c_str()); glDisable(GL_TEXTURE_2D); glDisable(GL_BLEND); glLoadIdentity(); } private: GLuint mFirstDisplayListId; GLuint* mTextureIds; }; Can anybody see anything weird going on here that would cause the garbled text? It's strange because if I change the font size, or the DPI, then some of the letters that display correctly become garbled, and other letters that were garbled before then display correctly.
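    Two well-known pitfalls match the "some sizes work, some letters garble" symptom, and both sit in GetGlyph rather than in the display lists: OpenGL's default 4-byte unpack alignment misreads tightly packed FreeType bitmaps whose width is not a multiple of 4, and the Glyph struct keeps FreeType's internal bitmap.buffer pointer, which is reused by the next FT_Load_Char. A sketch of both fixes, assuming the Font and Glyph types from the question and copying row by row in case the bitmap pitch is padded:

        Glyph* Font::GetGlyph(const unsigned char ch)
        {
            if (FT_Load_Char(mFace, ch, FT_LOAD_RENDER))
                cout << "OUCH" << endl;

            FT_Glyph glyph;
            if (FT_Get_Glyph(mFace->glyph, &glyph))
                cout << "OUCH" << endl;

            FT_BitmapGlyph bitmap_glyph = (FT_BitmapGlyph)glyph;
            const int width  = bitmap_glyph->bitmap.width;
            const int height = bitmap_glyph->bitmap.rows;
            const int pitch  = bitmap_glyph->bitmap.pitch;

            Glyph* thisGlyph  = new Glyph;
            thisGlyph->width  = width;
            thisGlyph->height = height;

            // Copy the pixels: bitmap.buffer belongs to FreeType and is reused by the
            // next FT_Load_Char, so storing the raw pointer leaves this Glyph pointing
            // at whatever glyph happened to be rendered last.
            thisGlyph->buffer = new GLubyte[width * height];
            for (int row = 0; row < height; ++row)
                memcpy(thisGlyph->buffer + row * width,
                       bitmap_glyph->bitmap.buffer + row * pitch,
                       width);

            FT_Done_Glyph(glyph);   // safe now that the pixels have been copied
            return thisGlyph;
        }

        // ...and in MakeDisplayList, before the glTexImage2D call:
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1);   // FreeType rows are not 4-byte aligned

    If the target hardware lacks non-power-of-two texture support, padding each glyph bitmap up to power-of-two dimensions would be the other usual suspect.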

  • How to hide/show a Process using c#?

    - by aF
    Hello, While executing my program, I want to hide/minimize Microsoft Speech Recognition Application: and at the end I want to show/maximize using c#! This process is not started by me so I can't give control the process startInfo. I've tried to use user32.dll methods such as: ShowWindow AnimatedWindows AnimatedWindows With all of them I have the same problem. I can hide the windows (althought I have to call one of the methods two times with SW_HIDE option), but when I call the method with a SW_SHOW flag, it simply doesn't shows.. How can I maximize/show after hiding the process? Thanks in advance! Here is some pieces of the code, now implemented to use SetWindowPlacement: { [DllImport("user32.dll")] [return: MarshalAs(UnmanagedType.Bool)] public static extern bool GetWindowPlacement(IntPtr hWnd, ref WINDOWPLACEMENT lpwndpl); [DllImport("user32.dll", SetLastError = true)] [return: MarshalAs(UnmanagedType.Bool)] static extern bool SetWindowPlacement(IntPtr hWnd, [In] ref WINDOWPLACEMENT lpwndpl); [DllImport("user32.dll")] public static extern Boolean ShowWindowAsync(IntPtr hWnd, Int32 nCmdShow); [DllImport("user32.dll")] public static extern Boolean SetForegroundWindow(IntPtr hWnd); [DllImport("user32.dll")] public static extern Boolean ShowWindow(IntPtr hWnd, Int32 nCmdShow); [DllImport("user32.dll")] public static extern Boolean AnimateWindow(IntPtr hWnd, uint dwTime, uint dwFlags); [DllImport("dwmapi.dll")] public static extern int DwmSetWindowAttribute(IntPtr hwnd, uint dwAttribute, IntPtr pvAttribute, IntPtr lol); //Definitions For Different Window Placement Constants const UInt32 SW_HIDE = 0; const UInt32 SW_SHOWNORMAL = 1; const UInt32 SW_NORMAL = 1; const UInt32 SW_SHOWMINIMIZED = 2; const UInt32 SW_SHOWMAXIMIZED = 3; const UInt32 SW_MAXIMIZE = 3; const UInt32 SW_SHOWNOACTIVATE = 4; const UInt32 SW_SHOW = 5; const UInt32 SW_MINIMIZE = 6; const UInt32 SW_SHOWMINNOACTIVE = 7; const UInt32 SW_SHOWNA = 8; const UInt32 SW_RESTORE = 9; public sealed class AnimateWindowFlags { public const int AW_HOR_POSITIVE = 0x00000001; public const int AW_HOR_NEGATIVE = 0x00000002; public const int AW_VER_POSITIVE = 0x00000004; public const int AW_VER_NEGATIVE = 0x00000008; public const int AW_CENTER = 0x00000010; public const int AW_HIDE = 0x00010000; public const int AW_ACTIVATE = 0x00020000; public const int AW_SLIDE = 0x00040000; public const int AW_BLEND = 0x00080000; } public struct WINDOWPLACEMENT { public int length; public int flags; public int showCmd; public System.Drawing.Point ptMinPosition; public System.Drawing.Point ptMaxPosition; public System.Drawing.Rectangle rcNormalPosition; } //this works param = new WINDOWPLACEMENT(); param.length = Marshal.SizeOf(typeof(WINDOWPLACEMENT)); param.showCmd = (int)SW_HIDE; lol = SetWindowPlacement(theprocess.MainWindowHandle, ref param); // this doesn't work WINDOWPLACEMENT param = new WINDOWPLACEMENT(); param.length = Marshal.SizeOf(typeof(WINDOWPLACEMENT)); param.showCmd = SW_SHOW; lol = GetWindowPlacement(theprocess.MainWindowHandle, ref param);
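    One Win32-level behaviour is worth knowing here, shown below as a plain C++ sketch because it is user32 rather than the managed wrapper that matters: the main-window handle reported for a process is derived from its visible top-level windows, so once the window has been hidden, Process.MainWindowHandle typically comes back as IntPtr.Zero, and every later ShowWindow or SetWindowPlacement call targets a null handle and silently does nothing. Caching the handle before hiding avoids that (note also that the "doesn't work" snippet above calls GetWindowPlacement where SetWindowPlacement appears to be intended). The window title used below is only a placeholder for however the handle is actually obtained:

        #include <windows.h>

        int main()
        {
            // Placeholder lookup - in the real program the handle would come from
            // Process.MainWindowHandle *before* the window is hidden.
            HWND hSpeechUi = FindWindowW(nullptr, L"Speech Recognition");
            if (hSpeechUi == nullptr)
                return 1;

            ShowWindow(hSpeechUi, SW_HIDE);      // hide, but keep the handle around

            // ... later: reuse the cached handle instead of re-querying the process.
            ShowWindow(hSpeechUi, SW_RESTORE);   // SW_RESTORE also un-minimizes
            SetForegroundWindow(hSpeechUi);
            return 0;
        }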

  • Initializing and drawing a mesh using OpenTK

    - by Boreal
    I'm implementing a "Mesh" class to use in my OpenTK game. You pass in a vertex array and an index array, and then you can call Mesh.Draw() to draw it using a shader. I've heard VBO's and VAO's are the way to go for this approach, but nowhere have I found a guide that shows how to get Data Video Memory Shader. Can someone give me a quick rundown of how this works? EDIT: So far, I have this: struct Vertex { public Vector3 position; public Vector3 normal; public Vector3 color; public static int memSize = 9 * sizeof(float); public static byte[] memOffset = { 0, 3 * sizeof(float), 6 * sizeof(float) }; } class Mesh { private uint vbo; private uint ibo; // stores the numbers of vertices and indices private int numVertices; private int numIndices; public Mesh(int numVertices, Vertex[] vertices, int numIndices, ushort[] indices) { // set numbers this.numVertices = numVertices; this.numIndices = numIndices; // generate buffers GL.GenBuffers(1, out vbo); GL.GenBuffers(1, out ibo); GL.BindBuffer(BufferTarget.ArrayBuffer, vbo); GL.BindBuffer(BufferTarget.ElementArrayBuffer, ibo); // send data to the buffers GL.BufferData(BufferTarget.ArrayBuffer, new IntPtr(Vertex.memSize * numVertices), vertices, BufferUsageHint.StaticDraw); GL.BufferData(BufferTarget.ElementArrayBuffer, new IntPtr(sizeof(ushort) * numIndices), indices, BufferUsageHint.StaticDraw); } public void Render() { // bind buffers GL.BindBuffer(BufferTarget.ArrayBuffer, vbo); GL.BindBuffer(BufferTarget.ElementArrayBuffer, ibo); // define offsets GL.VertexPointer(3, VertexPointerType.Float, Vertex.memSize, new IntPtr(Vertex.memOffset[0])); GL.NormalPointer(NormalPointerType.Float, Vertex.memSize, new IntPtr(Vertex.memOffset[1])); GL.ColorPointer(3, ColorPointerType.Float, Vertex.memSize, new IntPtr(Vertex.memOffset[2])); // draw GL.DrawElements(BeginMode.Triangles, numIndices, DrawElementsType.UnsignedInt, (IntPtr)0); } } class Application : GameWindow { Mesh triangle; protected override void OnLoad(EventArgs e) { base.OnLoad(e); GL.ClearColor(0.1f, 0.2f, 0.5f, 0.0f); GL.Enable(EnableCap.DepthTest); GL.Enable(EnableCap.VertexArray); GL.Enable(EnableCap.NormalArray); GL.Enable(EnableCap.ColorArray); Vertex v0 = new Vertex(); v0.position = new Vector3(-1.0f, -1.0f, 4.0f); v0.normal = new Vector3(0.0f, 0.0f, -1.0f); v0.color = new Vector3(1.0f, 1.0f, 0.0f); Vertex v1 = new Vertex(); v1.position = new Vector3(1.0f, -1.0f, 4.0f); v1.normal = new Vector3(0.0f, 0.0f, -1.0f); v1.color = new Vector3(1.0f, 0.0f, 0.0f); Vertex v2 = new Vertex(); v2.position = new Vector3(0.0f, 1.0f, 4.0f); v2.normal = new Vector3(0.0f, 0.0f, -1.0f); v2.color = new Vector3(0.2f, 0.9f, 1.0f); Vertex[] va = { v0, v1, v2 }; ushort[] ia = { 0, 1, 2 }; triangle = new Mesh(3, va, 3, ia); } protected override void OnRenderFrame(FrameEventArgs e) { base.OnRenderFrame(e); GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit); Matrix4 modelview = Matrix4.LookAt(Vector3.Zero, Vector3.UnitZ, Vector3.UnitY); GL.MatrixMode(MatrixMode.Modelview); GL.LoadMatrix(ref modelview); triangle.Render(); SwapBuffers(); } } It doesn't draw anything.
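    The most concrete problem in the code above is the index type: the buffer is filled with ushort indices (2 bytes each), but DrawElements is told DrawElementsType.UnsignedInt, so 4-byte indices are fetched and the very first one decodes as 65536 on a little-endian machine. The matching call is sketched below in raw OpenGL, which OpenTK's GL.DrawElements maps onto directly; the ibo and numIndices names are taken from the Mesh class above:

        // Sketch: the index type passed to glDrawElements has to match what was
        // uploaded to the element array buffer (ushort == GL_UNSIGNED_SHORT).
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
        glDrawElements(GL_TRIANGLES,        // primitive type
                       numIndices,          // 3 for the sample triangle
                       GL_UNSIGNED_SHORT,   // not GL_UNSIGNED_INT
                       (const void*)0);     // byte offset into the bound index buffer

    Two further things worth checking once that is fixed: the client-side arrays are switched on with GL.Enable rather than GL.EnableClientState(ArrayCap.VertexArray) and friends, so the vertex, normal and color pointers set in Render() are likely never sourced, and no projection matrix is ever loaded, which leaves the vertex placed at z = 13.5 well outside the default clip volume.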
