I'm creating a SQL client with WinAPI. Every time a user submits a query, the current columns and items of my ListView are deleted, then new columns and items are created based on the results of the query.
I've noticed through the Visual Studio heap profiler that every time the columns and items are created, 3 objects and ~60 bytes are allocated, and I can't figure out why. Examining the snapshot suggests it has to do with adding subitems via the ListView_SetItem function (_ListView_OnSetItem is the last thing I can identify in the Stacks view).
The results I'm testing with create 4 columns and 36 items/subitems.
To the best of my knowledge, I've implemented the ListView stuff correctly. Still, here are my two functions that reset and populate the ListView. Any help is appreciated.
void ResetListView(HWND hWnd) {
    HWND hWndHdr = (HWND)SendMessage(GetDlgItem(hWnd, IDQ_LISTVIEW), LVM_GETHEADER, 0, 0);
    int numColumns = (int)SendMessage(hWndHdr, HDM_GETITEMCOUNT, 0, 0L);

    ListView_DeleteAllItems(GetDlgItem(hWnd, IDQ_LISTVIEW));
    for (int i = 0; i < numColumns; i++)
        ListView_DeleteColumn(GetDlgItem(hWnd, IDQ_LISTVIEW), 0);
}
void CreateListView(HWND hWnd, QueryResults * queryResults) {
    LVCOLUMN lvc;
    ZeroMemory(&lvc, sizeof(LVCOLUMN));
    lvc.mask = LVCF_FMT | LVCF_WIDTH | LVCF_TEXT | LVCF_SUBITEM;
    lvc.fmt = LVCFMT_CENTER;

    int numHeaders = queryResults->numHeaders;

    // Add columns
    for (int i = 0; i < numHeaders; i++) {
        lvc.iSubItem = i;
        lvc.pszText = queryResults->headers[i];
        lvc.cx = 100;
        ListView_InsertColumn(GetDlgItem(hWnd, IDQ_LISTVIEW), i, &lvc);
    }

    LVITEM lvi;
    ZeroMemory(&lvi, sizeof(LVITEM));
    lvi.mask = LVIF_TEXT;

    // Add items and subitems
    for (int i = 0; i < queryResults->rows.size(); i++) {
        lvi.iItem = i;
        lvi.iSubItem = 0;
        lvi.pszText = queryResults->rows[i]->cells[0];
        ListView_InsertItem(GetDlgItem(hWnd, IDQ_LISTVIEW), &lvi);
        for (int j = 1; j < numHeaders; j++) {
            lvi.iSubItem = j;
            lvi.pszText = queryResults->rows[i]->cells[j];
            ListView_SetItem(GetDlgItem(hWnd, IDQ_LISTVIEW), &lvi);
        }
    }
    return;
}
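As an aside, since both functions run on every submitted query, here is a minimal sketch of a caller that suspends redraw while the list is rebuilt. RefreshResults is a hypothetical wrapper, not part of the code above, and it does not change the allocations being asked about; it only avoids repainting the control item by item during the rebuild.

void RefreshResults(HWND hWnd, QueryResults * queryResults) {
    HWND hWndList = GetDlgItem(hWnd, IDQ_LISTVIEW);

    SendMessage(hWndList, WM_SETREDRAW, FALSE, 0);  // stop painting during the rebuild
    ResetListView(hWnd);
    CreateListView(hWnd, queryResults);
    SendMessage(hWndList, WM_SETREDRAW, TRUE, 0);   // resume painting
    InvalidateRect(hWndList, NULL, TRUE);           // repaint once with the new contents
}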
I added a picture control to my MFC dialog and set its type to Frame. I need to display an 8-bit buffer (NOT loaded from a file) on the picture control. The image comes out scaled down, repeated, and tilted diagonally, which is not correct; I could not make it fit the control properly, and resizing the control did not help.
w = 750;
h = 850;
bpp = 8;
void CDispDlg::BuildINfoAndDisp(int w, int h)
{
    // Allocate BITMAPINFO with room for a full 256-entry palette
    m_pbmi = (BITMAPINFO*) new BYTE[sizeof(BITMAPINFO) + 256 * sizeof(RGBQUAD)];
    m_pbmi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    m_pbmi->bmiHeader.biPlanes = 1;
    m_pbmi->bmiHeader.biBitCount = bpp;
    m_pbmi->bmiHeader.biCompression = BI_RGB;
    m_pbmi->bmiHeader.biSizeImage = 0;
    m_pbmi->bmiHeader.biXPelsPerMeter = 0;
    m_pbmi->bmiHeader.biYPelsPerMeter = 0;
    m_pbmi->bmiHeader.biClrUsed = 0;
    m_pbmi->bmiHeader.biClrImportant = 0;

    // Grayscale palette
    for (int i = 0; i < 256; i++)
    {
        m_pbmi->bmiColors[i].rgbBlue = i;
        m_pbmi->bmiColors[i].rgbGreen = i;
        m_pbmi->bmiColors[i].rgbRed = i;
        m_pbmi->bmiColors[i].rgbReserved = 0;
    }

    m_pbmi->bmiHeader.biWidth = w;
    m_pbmi->bmiHeader.biHeight = -h;   // negative height = top-down DIB

    CClientDC dc(GetDlgItem(IDC_PicCntrl_DSIP));
    CRect rect;
    GetDlgItem(IDC_PicCntrl_DSIP)->GetClientRect(&rect);

    SetStretchBltMode(dc.GetSafeHdc(), COLORONCOLOR);
    StretchDIBits(dc.GetSafeHdc(), 0, 0, rect.Width(), rect.Height(),
                  0, 0, m_wImage, m_hImage, &m_U8imageBuf, m_pbmi,
                  DIB_RGB_COLORS, SRCCOPY);
}
I'm working with Comctl32.dll's list-view control and attempting to divide the width of it amongst 10 columns, auto-sizing to match the strings in their respective headers. I am using the ListView_SetColumnWidth macro with the LVSCW_AUTOSIZE_USEHEADER value, and it's working with every column except the first one:
Here's the code I think is relevant, but please let me know if it's not enough:
HWND hListView;
hListView = CreateWindowEx(
    WS_EX_CLIENTEDGE,
    WC_LISTVIEW,
    L"",
    WS_CHILD | WS_VISIBLE |
    LVS_REPORT,
    200, 10, 800, 150,
    hwnd,
    (HMENU)IDC_MAIN_LV,
    GetModuleHandle(NULL),
    NULL);
if (hListView == NULL) {
    MessageBox(hwnd, L"Could not create list box.", L"Error!", MB_OK | MB_ICONERROR);
}

LVCOLUMN lvc;
WCHAR buffer[256];
lvc.mask = LVCF_FMT | LVCF_WIDTH | LVCF_TEXT | LVCF_SUBITEM;
for (int col = 0; col < C_COLUMNS; col++) {
    lvc.iSubItem = col;
    lvc.pszText = buffer;
    lvc.cx = 100;
    lvc.fmt = LVCFMT_CENTER;
    LoadString(
        GetModuleHandle(NULL),
        IDS_HP + col,
        buffer,
        ARRAYSIZE(buffer));   // buffer size in characters, not sizeof (bytes)
    if (ListView_InsertColumn(hListView, col, &lvc) == -1) {
        MessageBox(hwnd, L"Could not create list box.", L"Error!", MB_OK | MB_ICONERROR);
    }
    ListView_SetColumnWidth(hListView, col, LVSCW_AUTOSIZE_USEHEADER);
}
SetFocus(hListView);
This is my first project after learning the basics of C++ so I'm sure I'm doing everything shown here the worst way possible. (: Specifically, I'd like to know what's causing this particular error please. Thanks for reading!
From the ListView_SetColumnWidth documentation:
LVSCW_AUTOSIZE_USEHEADER
Automatically sizes the column to fit the header text. If you use this value with the last column, its width is set to fill the remaining width of the list-view control.
When you use this for the first time, there's only one column, so it is the last column and therefore gets resized to the full width of the listview.
The solution is to add all your columns first and then autosize them.
for (int col = 0; col < C_COLUMNS; col++) {
    lvc.iSubItem = col;
    lvc.pszText = buffer;
    lvc.cx = 100;
    lvc.fmt = LVCFMT_CENTER;
    LoadString(
        GetModuleHandle(NULL),
        IDS_HP + col,
        buffer,
        ARRAYSIZE(buffer));
    ListView_InsertColumn(hListView, col, &lvc);
}
for (int col = 0; col < C_COLUMNS; col++) {
    ListView_SetColumnWidth(hListView, col, LVSCW_AUTOSIZE_USEHEADER);
}
I'm trying to learn how to use a screen buffer, and I made a mistake that I do not understand. These are the settings for my screen buffer:
wchar_t* screen = new wchar_t[nScreenWidth * nScreenHeight];
for (int i = 0; i < nScreenWidth * nScreenHeight; i++) {
    screen[i] = L' ';
}

HANDLE hConsole = CreateConsoleScreenBuffer(GENERIC_READ | GENERIC_WRITE, 0, NULL, CONSOLE_TEXTMODE_BUFFER, NULL);
SetConsoleActiveScreenBuffer(hConsole);

DWORD dwBytesWritten = 0;
WriteConsoleOutputCharacterW(hConsole, screen, (nScreenWidth * nScreenHeight), { 0, 0 }, &dwBytesWritten);
I manage to print my 2D array to it, but oddly it comes out lying flat in my terminal window (see the linked screenshot).
Small print screen of my failed 2D array
It's as if all the new lines have been removed. This is my loop that prints my 2D array to "screen".
int g = 0;
while (g < 100) {
    WriteConsoleOutputCharacterW(hConsole, screen, (nScreenWidth * nScreenHeight), { 0, 0 }, &dwBytesWritten);
    for (int i = 0; i < field.difficulty; i++) {
        std::this_thread::sleep_for(std::chrono::milliseconds(50));
    }
    for (int y = 0; y < field.nFieldHeight; y++) {
        for (int x = 0; x < field.nFieldWidth; x++) {
            screen[(y + 2) * field.nFieldWidth + (x + 2)] = field.matrix[x][y];
        }
    }
}
Is it possible that I need to write to a coordinate in the screen buffer every time I print a character?
By default the console window is resizable, and this causes the output to wrap. You can prevent this by using the following:
// Get console window
HWND hwndWindow = GetConsoleWindow();

// Prevent resize & maximize
LONG lFlags = GetWindowLong(hwndWindow, GWL_STYLE) & ~WS_MAXIMIZEBOX & ~WS_SIZEBOX & ~WS_HSCROLL;
SetWindowLong(hwndWindow, GWL_STYLE, lFlags);

// Get console handle
HANDLE hConsole = GetStdHandle(STD_OUTPUT_HANDLE);

// Set window and buffer size
SMALL_RECT consoleRect = { 0, 0, SCREEN_W - 1, SCREEN_H - 1 };
SetConsoleScreenBufferSize(hConsole, { SCREEN_W, SCREEN_H });
SetConsoleWindowInfo(hConsole, TRUE, &consoleRect);
I'm quite new to WinAPI, so I sometimes make a lot of basic mistakes. You've been warned; let's move on to my problem. :P
I want to make something like a grid. If the user clicks one of the grid's fields, a bitmap should appear there. I've made a bitmap of a field that I want to use as a button. At the start the user inputs the size of the grid, so I've made a dynamic array of buttons with that bitmap. Unfortunately, I have no idea how to handle these buttons when they are clicked. Here's my code:
// Here I create my window. I also have global variables: bool* table and HWND next.
table = new bool[x*y];
for (int i = 0; i < x*y; ++i)
    table[i] = 0;
// table records whether each field is already filled.

HWND* buttons = new HWND[x*y];
next = CreateWindowEx(4, _T("BUTTON"), _T("MOVE"), WS_CHILD | WS_VISIBLE, x * 12 - 25, (y + 4) * 25 - 90, 100, 50, hwnd, NULL, hThisInstance, NULL);
for (int i = 0; i < x; ++i)
{
    for (int j = 0; j < y; ++j)
    {
        buttons[i + j * x] = CreateWindowEx(0, _T("BUTTON"), NULL, WS_CHILD | WS_VISIBLE | BS_BITMAP, 0 + i * 25, 0 + j * 25, 25, 25, hwnd, NULL, hThisInstance, NULL);
        SendMessage(buttons[i + j * x], BM_SETIMAGE, (WPARAM)IMAGE_BITMAP, (LPARAM)field);
    }
}
And now in WindowProcedure():
switch (message)
{
case WM_COMMAND:
    if ((HWND)lParam == next) /* there will be some code in the future ;) */;
    else
    {
        // Here I need to set the corresponding value in table to 1.
        // I have a handle to the clicked button ((HWND)lParam), but I don't know
        // how to find its position in the table.
    }
    break;
I tried making a struct with an HWND and int x, y, but I still don't know how to manage this. BTW, the code might look very old: I need to create an app that runs on Windows XP (it's a school project, do not inquire :P), and in addition I'm using a very old tutorial.
Assign an ID for each button using the HMENU parameter. The ID cannot be zero, therefore add an arbitrary offset, for example 100:
buttons[i + j * x] = CreateWindowEx(0, _T("BUTTON"), NULL,
WS_CHILD | WS_VISIBLE | BS_BITMAP, 0 + i * 25, 0 + j * 25, 25, 25,
hwnd, HMENU(100 + i + j * x), hThisInstance, NULL);
You can also extract the row and column from button_index, knowing the total rows and total columns. Example:
case WM_COMMAND:
{
    if (HIWORD(wParam) == BN_CLICKED)
    {
        int id = LOWORD(wParam);
        int button_index = id - 100;
        if (button_index >= 0 && button_index < x * y)
        {
            int row = button_index / x;
            int column = button_index % x;
            ...
        }
        ...
    }
    break;
}
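To connect this back to the question's table array, here is a minimal sketch of what could go inside that range check, assuming table is the global from the question; the EnableWindow call is just one optional way of marking a field as used.

if (button_index >= 0 && button_index < x * y)
{
    table[button_index] = true;          // mark this field as filled
    EnableWindow((HWND)lParam, FALSE);   // optional: block further clicks on this button
}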
I have a ListView control with 4 columns that is initialized in the WM_CREATE proc.
hListView1 = CreateWindowEx(WS_EX_CLIENTEDGE, WC_LISTVIEW, NULL, WS_CHILD|WS_VSCROLL|WS_HSCROLL|WS_VISIBLE|LVS_REPORT|LVS_SHOWSELALWAYS, 230, 20, 300, 250, hwnd, (HMENU)ID_EDIT1, GetModuleHandle(NULL), NULL);
ListView_SetExtendedListViewStyle(hListView1, LVS_EX_FULLROWSELECT | LVS_EX_HEADERDRAGDROP);
lvCol.mask = LVCF_FMT | LVCF_WIDTH | LVCF_TEXT | LVCF_SUBITEM;
lvCol.fmt = LVCFMT_LEFT;

lvCol.iSubItem = 0;
lvCol.cx = 30;
lvCol.pszText = "";
ListView_InsertColumn(hListView1, 0, &lvCol);

lvCol.iSubItem = 1;
lvCol.cx = 150;
lvCol.pszText = "Name";
ListView_InsertColumn(hListView1, 1, &lvCol);

lvCol.iSubItem = 2;
lvCol.cx = 50;
lvCol.pszText = "Size";
ListView_InsertColumn(hListView1, 2, &lvCol);

lvCol.iSubItem = 3;
lvCol.cx = 80;
lvCol.pszText = "Modified";
ListView_InsertColumn(hListView1, 3, &lvCol);
Then I have a function that inserts the items (it works fine until I call DeleteAllItems):
...
LVITEM lvItem;
j = 0;
while (FindNextFile(hFind, &FindFileData)) {
    lvItem.iItem = j;
    lvItem.iImage = 1;
    if (FindFileData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) {
        lvItem.iImage = 0;
    }
    ListView_InsertItem(hListView1, &lvItem);
    ListView_SetItemText(hListView1, j, 1, FindFileData.cFileName);
    ListView_SetItemText(hListView1, j, 2, msg1);
    ListView_SetItemText(hListView1, j, 3, msg2);
    j++;
}
But then whenever I call
ListView_DeleteAllItems(hListView1);
and afterwards call my function that inserts the items, my ListView is cleared (the columns are still there) but no new items are inserted.
I heard about indexes that are not cleared, but I couldn't figure it out.
Thanks in advance ;-)
Solution:
I added
lvItem.mask = LVIF_IMAGE | LVIF_STATE;
lvItem.state = 0;
lvItem.stateMask = 0;
lvItem.iSubItem = 0;
You are not setting lvItem.mask, so ListView_InsertItem doesn't know which fields are valid and which aren't.
Try something like this:
...
LVITEM lvItem;
lvItem.mask = LVIF_IMAGE | LVIF_DI_SETITEM;
j = 0;
...
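Putting the answer together with the posted solution, here is a minimal sketch of the insertion loop with the item fully initialized before use (field values taken from the question; this is illustrative rather than the exact original code):

LVITEM lvItem;
ZeroMemory(&lvItem, sizeof(lvItem));
lvItem.mask = LVIF_IMAGE | LVIF_STATE;   // tell the control which fields are valid
lvItem.state = 0;
lvItem.stateMask = 0;
lvItem.iSubItem = 0;

j = 0;
while (FindNextFile(hFind, &FindFileData)) {
    lvItem.iItem = j;
    lvItem.iImage = (FindFileData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) ? 0 : 1;
    ListView_InsertItem(hListView1, &lvItem);
    ListView_SetItemText(hListView1, j, 1, FindFileData.cFileName);
    j++;
}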