// kvm-guest-drivers-windows -- viogpu_queue.cpp
1/*
2* Copyright (C) 2019-2020 Red Hat, Inc.
3*
4* Written By: Vadim Rozenfeld <vrozenfe@redhat.com>
5*
6* Redistribution and use in source and binary forms, with or without
7* modification, are permitted provided that the following conditions
8* are met :
9* 1. Redistributions of source code must retain the above copyright
10* notice, this list of conditions and the following disclaimer.
11* 2. Redistributions in binary form must reproduce the above copyright
12* notice, this list of conditions and the following disclaimer in the
13* documentation and / or other materials provided with the distribution.
14* 3. Neither the names of the copyright holders nor the names of their contributors
15* may be used to endorse or promote products derived from this software
16* without specific prior written permission.
17* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND
18* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
20* ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE
21* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
22* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
23* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
24* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
25* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
26* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
27* SUCH DAMAGE.
28*/
29
30#include "viogpu_queue.h"
31#include "baseobj.h"
32#if !DBG
33#include "viogpu_queue.tmh"
34#endif
35
36static BOOLEAN BuildSGElement(VirtIOBufferDescriptor* sg, PVOID buf, ULONG size)
37{
38if (size != 0 && MmIsAddressValid(buf))
39{
40sg->length = min(size, PAGE_SIZE);
41sg->physAddr = MmGetPhysicalAddress(buf);
42return TRUE;
43}
44return FALSE;
45}
46
47VioGpuQueue::VioGpuQueue()
48{
49m_pBuf = NULL;
50m_Index = (UINT)-1;
51m_pVIODevice = NULL;
52m_pVirtQueue = NULL;
53KeInitializeSpinLock(&m_SpinLock);
54}
55
// Destructor: detaches from the virtqueue via Close(). The virtqueue object
// itself is not owned by this wrapper and is not freed here.
VioGpuQueue::~VioGpuQueue()
{
    Close();
}
60
61void VioGpuQueue::Close(void)
62{
63KIRQL SavedIrql;
64Lock(&SavedIrql);
65m_pVirtQueue = NULL;
66Unlock(SavedIrql);
67}
68
69BOOLEAN VioGpuQueue::Init(
70_In_ VirtIODevice* pVIODevice,
71_In_ struct virtqueue* pVirtQueue,
72_In_ UINT index)
73{
74if ((pVIODevice == NULL) ||
75(pVirtQueue == NULL)) {
76return FALSE;
77}
78m_pVIODevice = pVIODevice;
79m_pVirtQueue = pVirtQueue;
80m_Index = index;
81EnableInterrupt();
82return TRUE;
83}
84
85_IRQL_requires_max_(DISPATCH_LEVEL)
86_IRQL_saves_global_(OldIrql, Irql)
87_IRQL_raises_(DISPATCH_LEVEL)
88void VioGpuQueue::Lock(KIRQL* Irql)
89{
90KIRQL SavedIrql = KeGetCurrentIrql();
91DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s at IRQL %d\n", __FUNCTION__, SavedIrql));
92
93if (SavedIrql < DISPATCH_LEVEL) {
94KeAcquireSpinLock(&m_SpinLock, &SavedIrql);
95}
96else if (SavedIrql == DISPATCH_LEVEL) {
97KeAcquireSpinLockAtDpcLevel(&m_SpinLock);
98}
99else {
100VioGpuDbgBreak();
101}
102*Irql = SavedIrql;
103
104DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
105}
106
107_IRQL_requires_(DISPATCH_LEVEL)
108_IRQL_restores_global_(OldIrql, Irql)
109void VioGpuQueue::Unlock(KIRQL Irql)
110{
111DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s at IRQL %d\n", __FUNCTION__, Irql));
112
113if (Irql < DISPATCH_LEVEL) {
114KeReleaseSpinLock(&m_SpinLock, Irql);
115}
116else {
117KeReleaseSpinLockFromDpcLevel(&m_SpinLock);
118}
119
120DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
121}
122
123PAGED_CODE_SEG_BEGIN
124
125UINT VioGpuQueue::QueryAllocation()
126{
127PAGED_CODE();
128
129DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
130
131USHORT NumEntries;
132ULONG RingSize, HeapSize;
133
134NTSTATUS status = virtio_query_queue_allocation(
135m_pVIODevice,
136m_Index,
137&NumEntries,
138&RingSize,
139&HeapSize);
140if (!NT_SUCCESS(status))
141{
142DbgPrint(TRACE_LEVEL_FATAL, ("[%s] virtio_query_queue_allocation(%d) failed with error %x\n", __FUNCTION__, m_Index, status));
143return 0;
144}
145
146DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
147
148return NumEntries;
149}
150PAGED_CODE_SEG_END
151
152PAGED_CODE_SEG_BEGIN
153
154BOOLEAN CtrlQueue::GetDisplayInfo(PGPU_VBUFFER buf, UINT id, PULONG xres, PULONG yres)
155{
156PAGED_CODE();
157
158DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
159
160PGPU_RESP_DISP_INFO resp = (PGPU_RESP_DISP_INFO)buf->resp_buf;
161if (resp->hdr.type != VIRTIO_GPU_RESP_OK_DISPLAY_INFO)
162{
163DbgPrint(TRACE_LEVEL_VERBOSE, (" %s type = %x: disabled\n", __FUNCTION__, resp->hdr.type));
164return FALSE;
165}
166if (resp->pmodes[id].enabled) {
167DbgPrint(TRACE_LEVEL_VERBOSE, ("output %d: %dx%d+%d+%d\n", id,
168resp->pmodes[id].r.width,
169resp->pmodes[id].r.height,
170resp->pmodes[id].r.x,
171resp->pmodes[id].r.y));
172*xres = resp->pmodes[id].r.width;
173*yres = resp->pmodes[id].r.height;
174}
175else {
176DbgPrint(TRACE_LEVEL_VERBOSE, ("output %d: disabled\n", id));
177return FALSE;
178}
179
180DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
181
182return TRUE;
183}
184
185BOOLEAN CtrlQueue::AskDisplayInfo(PGPU_VBUFFER* buf)
186{
187PAGED_CODE();
188
189DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
190
191PGPU_CTRL_HDR cmd;
192PGPU_VBUFFER vbuf;
193PGPU_RESP_DISP_INFO resp_buf;
194KEVENT event;
195NTSTATUS status;
196
197resp_buf = reinterpret_cast<PGPU_RESP_DISP_INFO>
198(new (NonPagedPoolNx) BYTE[sizeof(GPU_RESP_DISP_INFO)]);
199
200if (!resp_buf)
201{
202DbgPrint(TRACE_LEVEL_ERROR, ("---> %s Failed allocate %d bytes\n", __FUNCTION__, sizeof(GPU_RESP_DISP_INFO)));
203return FALSE;
204}
205
206cmd = (PGPU_CTRL_HDR)AllocCmdResp(&vbuf, sizeof(GPU_CTRL_HDR), resp_buf, sizeof(GPU_RESP_DISP_INFO));
207RtlZeroMemory(cmd, sizeof(GPU_CTRL_HDR));
208
209cmd->type = VIRTIO_GPU_CMD_GET_DISPLAY_INFO;
210
211KeInitializeEvent(&event, NotificationEvent, FALSE);
212vbuf->event = &event;
213
214LARGE_INTEGER timeout = { 0 };
215timeout.QuadPart = Int32x32To64(1000, -10000);
216
217QueueBuffer(vbuf);
218status = KeWaitForSingleObject(&event,
219Executive,
220KernelMode,
221FALSE,
222&timeout);
223
224if (status == STATUS_TIMEOUT) {
225DbgPrint(TRACE_LEVEL_FATAL, ("---> Failed to ask display info\n"));
226VioGpuDbgBreak();
227}
228*buf = vbuf;
229
230DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
231
232return TRUE;
233}
234
235BOOLEAN CtrlQueue::AskEdidInfo(PGPU_VBUFFER* buf, UINT id)
236{
237PAGED_CODE();
238
239DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
240
241PGPU_CMD_GET_EDID cmd;
242PGPU_VBUFFER vbuf;
243PGPU_RESP_EDID resp_buf;
244KEVENT event;
245NTSTATUS status;
246
247resp_buf = reinterpret_cast<PGPU_RESP_EDID>
248(new (NonPagedPoolNx) BYTE[sizeof(GPU_RESP_EDID)]);
249
250if (!resp_buf)
251{
252DbgPrint(TRACE_LEVEL_ERROR, ("---> %s Failed allocate %d bytes\n", __FUNCTION__, sizeof(GPU_RESP_EDID)));
253return FALSE;
254}
255cmd = (PGPU_CMD_GET_EDID)AllocCmdResp(&vbuf, sizeof(GPU_CMD_GET_EDID), resp_buf, sizeof(GPU_RESP_EDID));
256RtlZeroMemory(cmd, sizeof(GPU_CMD_GET_EDID));
257
258cmd->hdr.type = VIRTIO_GPU_CMD_GET_EDID;
259cmd->scanout = id;
260
261KeInitializeEvent(&event, NotificationEvent, FALSE);
262vbuf->event = &event;
263
264LARGE_INTEGER timeout = { 0 };
265timeout.QuadPart = Int32x32To64(1000, -10000);
266
267QueueBuffer(vbuf);
268
269status = KeWaitForSingleObject(&event,
270Executive,
271KernelMode,
272FALSE,
273&timeout
274);
275
276if (status == STATUS_TIMEOUT) {
277DbgPrint(TRACE_LEVEL_FATAL, ("---> Failed to get edid info\n"));
278VioGpuDbgBreak();
279}
280
281*buf = vbuf;
282
283DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
284
285return TRUE;
286}
287
288BOOLEAN CtrlQueue::GetEdidInfo(PGPU_VBUFFER buf, UINT id, PBYTE edid)
289{
290PAGED_CODE();
291
292DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
293PGPU_CMD_GET_EDID cmd = (PGPU_CMD_GET_EDID)buf->buf;
294PGPU_RESP_EDID resp = (PGPU_RESP_EDID)buf->resp_buf;
295PUCHAR resp_edit = (PUCHAR)(resp->edid + (ULONGLONG)id * EDID_V1_BLOCK_SIZE);
296if (resp->hdr.type != VIRTIO_GPU_RESP_OK_EDID)
297{
298DbgPrint(TRACE_LEVEL_VERBOSE, (" %s type = %x: disabled\n", __FUNCTION__, resp->hdr.type));
299return FALSE;
300}
301if (cmd->scanout != id)
302{
303DbgPrint(TRACE_LEVEL_VERBOSE, (" %s invalid scaout = %x\n", __FUNCTION__, cmd->scanout));
304return FALSE;
305}
306
307RtlCopyMemory(edid, resp_edit, EDID_RAW_BLOCK_SIZE);
308DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
309
310return TRUE;
311}
312
313void CtrlQueue::CreateResource(UINT res_id, UINT format, UINT width, UINT height)
314{
315PAGED_CODE();
316
317DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
318
319PGPU_RES_CREATE_2D cmd;
320PGPU_VBUFFER vbuf;
321cmd = (PGPU_RES_CREATE_2D)AllocCmd(&vbuf, sizeof(*cmd));
322RtlZeroMemory(cmd, sizeof(*cmd));
323
324cmd->hdr.type = VIRTIO_GPU_CMD_RESOURCE_CREATE_2D;
325cmd->resource_id = res_id;
326cmd->format = format;
327cmd->width = width;
328cmd->height = height;
329
330//FIXME!!! if
331QueueBuffer(vbuf);
332
333DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
334}
335
336void CtrlQueue::ResFlush(UINT res_id, UINT width, UINT height, UINT x, UINT y)
337{
338PAGED_CODE();
339
340DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
341PGPU_RES_FLUSH cmd;
342PGPU_VBUFFER vbuf;
343cmd = (PGPU_RES_FLUSH)AllocCmd(&vbuf, sizeof(*cmd));
344RtlZeroMemory(cmd, sizeof(*cmd));
345
346cmd->hdr.type = VIRTIO_GPU_CMD_RESOURCE_FLUSH;
347cmd->resource_id = res_id;
348cmd->r.width = width;
349cmd->r.height = height;
350cmd->r.x = x;
351cmd->r.y = y;
352
353QueueBuffer(vbuf);
354
355DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
356}
357
358void CtrlQueue::TransferToHost2D(UINT res_id, ULONG offset, UINT width, UINT height, UINT x, UINT y, PUINT fence_id)
359{
360PAGED_CODE();
361
362DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
363PGPU_RES_TRANSF_TO_HOST_2D cmd;
364PGPU_VBUFFER vbuf;
365cmd = (PGPU_RES_TRANSF_TO_HOST_2D)AllocCmd(&vbuf, sizeof(*cmd));
366RtlZeroMemory(cmd, sizeof(*cmd));
367
368cmd->hdr.type = VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D;
369cmd->resource_id = res_id;
370cmd->offset = offset;
371cmd->r.width = width;
372cmd->r.height = height;
373cmd->r.x = x;
374cmd->r.y = y;
375
376if (fence_id) {
377cmd->hdr.flags |= VIRTIO_GPU_FLAG_FENCE;
378cmd->hdr.fence_id = *fence_id;
379}
380
381QueueBuffer(vbuf);
382
383DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
384}
385
386void CtrlQueue::AttachBacking(UINT res_id, PGPU_MEM_ENTRY ents, UINT nents)
387{
388PAGED_CODE();
389
390DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
391
392PGPU_RES_ATTACH_BACKING cmd;
393PGPU_VBUFFER vbuf;
394cmd = (PGPU_RES_ATTACH_BACKING)AllocCmd(&vbuf, sizeof(*cmd));
395RtlZeroMemory(cmd, sizeof(*cmd));
396
397cmd->hdr.type = VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING;
398cmd->resource_id = res_id;
399cmd->nr_entries = nents;
400
401vbuf->data_buf = ents;
402vbuf->data_size = sizeof(*ents) * nents;
403
404QueueBuffer(vbuf);
405
406DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
407}
408
409PAGED_CODE_SEG_END
410
411void CtrlQueue::UnrefResource(UINT res_id)
412{
413DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
414
415PGPU_RES_UNREF cmd;
416PGPU_VBUFFER vbuf;
417cmd = (PGPU_RES_UNREF)AllocCmd(&vbuf, sizeof(*cmd));
418RtlZeroMemory(cmd, sizeof(*cmd));
419
420cmd->hdr.type = VIRTIO_GPU_CMD_RESOURCE_UNREF;
421cmd->resource_id = res_id;
422
423QueueBuffer(vbuf);
424
425DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
426}
427
428void CtrlQueue::InvalBacking(UINT res_id)
429{
430DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
431
432PGPU_RES_DETACH_BACKING cmd;
433PGPU_VBUFFER vbuf;
434cmd = (PGPU_RES_DETACH_BACKING)AllocCmd(&vbuf, sizeof(*cmd));
435RtlZeroMemory(cmd, sizeof(*cmd));
436
437cmd->hdr.type = VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING;
438cmd->resource_id = res_id;
439
440QueueBuffer(vbuf);
441
442DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
443}
444
445PVOID CtrlQueue::AllocCmdResp(PGPU_VBUFFER* buf, int cmd_sz, PVOID resp_buf, int resp_sz)
446{
447DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
448
449PGPU_VBUFFER vbuf;
450vbuf = m_pBuf->GetBuf(cmd_sz, resp_sz, resp_buf);
451ASSERT(vbuf);
452*buf = vbuf;
453
454DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
455
456return vbuf ? vbuf->buf : NULL;
457}
458
459PVOID CtrlQueue::AllocCmd(PGPU_VBUFFER* buf, int sz)
460{
461DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
462
463if (buf == NULL || sz == 0) {
464return NULL;
465}
466
467PGPU_VBUFFER vbuf = m_pBuf->GetBuf(sz, sizeof(GPU_CTRL_HDR), NULL);
468ASSERT(vbuf);
469*buf = vbuf;
470
471DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s vbuf = %p\n", __FUNCTION__, vbuf));
472
473return vbuf ? vbuf->buf : NULL;
474}
475
476void CtrlQueue::SetScanout(UINT scan_id, UINT res_id, UINT width, UINT height, UINT x, UINT y)
477{
478DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
479
480PGPU_SET_SCANOUT cmd;
481PGPU_VBUFFER vbuf;
482cmd = (PGPU_SET_SCANOUT)AllocCmd(&vbuf, sizeof(*cmd));
483RtlZeroMemory(cmd, sizeof(*cmd));
484
485cmd->hdr.type = VIRTIO_GPU_CMD_SET_SCANOUT;
486cmd->resource_id = res_id;
487cmd->scanout_id = scan_id;
488cmd->r.width = width;
489cmd->r.height = height;
490cmd->r.x = x;
491cmd->r.y = y;
492
493//FIXME if
494QueueBuffer(vbuf);
495
496DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
497}
498
499
500#define SGLIST_SIZE 64
501UINT CtrlQueue::QueueBuffer(PGPU_VBUFFER buf)
502{
503DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
504
505VirtIOBufferDescriptor sg[SGLIST_SIZE];
506UINT sgleft = SGLIST_SIZE;
507UINT outcnt = 0, incnt = 0;
508UINT ret = 0;
509KIRQL SavedIrql;
510
511if (buf->size > PAGE_SIZE) {
512DbgPrint(TRACE_LEVEL_ERROR, ("<--> %s size is too big %d\n", __FUNCTION__, buf->size));
513return 0;
514}
515
516if (BuildSGElement(&sg[outcnt + incnt], (PVOID)buf->buf, buf->size))
517{
518outcnt++;
519sgleft--;
520}
521
522if (buf->data_size)
523{
524ULONG data_size = buf->data_size;
525PVOID data_buf = (PVOID)buf->data_buf;
526while (data_size)
527{
528if (BuildSGElement(&sg[outcnt + incnt], data_buf, data_size))
529{
530data_buf = (PVOID)((LONG_PTR)(data_buf)+PAGE_SIZE);
531data_size -= min(data_size, PAGE_SIZE);
532outcnt++;
533sgleft--;
534if (sgleft == 0) {
535DbgPrint(TRACE_LEVEL_ERROR, ("<--> %s no more sgelenamt spots left %d\n", __FUNCTION__, outcnt));
536return 0;
537}
538}
539}
540}
541
542if (buf->resp_size > PAGE_SIZE) {
543DbgPrint(TRACE_LEVEL_ERROR, ("<--> %s resp_size is too big %d\n", __FUNCTION__, buf->resp_size));
544return 0;
545}
546
547if (buf->resp_size && (sgleft > 0))
548{
549if (BuildSGElement(&sg[outcnt + incnt], (PVOID)buf->resp_buf, buf->resp_size))
550{
551incnt++;
552sgleft--;
553}
554}
555
556DbgPrint(TRACE_LEVEL_VERBOSE, ("<--> %s sgleft %d\n", __FUNCTION__, sgleft));
557
558Lock(&SavedIrql);
559ret = AddBuf(&sg[0], outcnt, incnt, buf, NULL, 0);
560Kick();
561Unlock(SavedIrql);
562
563DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s ret = %d\n", __FUNCTION__, ret));
564
565return ret;
566}
567
568PGPU_VBUFFER CtrlQueue::DequeueBuffer(_Out_ UINT* len)
569{
570DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
571
572PGPU_VBUFFER buf = NULL;
573KIRQL SavedIrql;
574Lock(&SavedIrql);
575buf = (PGPU_VBUFFER)GetBuf(len);
576Unlock(SavedIrql);
577if (buf == NULL)
578{
579*len = 0;
580}
581DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
582
583return buf;
584}
585
586
// Return a finished vbuffer to the shared buffer pool; FreeBuf() releases
// any out-of-line response/data allocations and re-links it as free.
void VioGpuQueue::ReleaseBuffer(PGPU_VBUFFER buf)
{
    DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));

    m_pBuf->FreeBuf(buf);

    DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
}
595
596
597BOOLEAN VioGpuBuf::Init(_In_ UINT cnt)
598{
599KIRQL OldIrql;
600
601DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
602
603for (UINT i = 0; i < cnt; ++i) {
604PGPU_VBUFFER pvbuf = reinterpret_cast<PGPU_VBUFFER>
605(new (NonPagedPoolNx) BYTE[VBUFFER_SIZE]);
606//FIXME
607RtlZeroMemory(pvbuf, VBUFFER_SIZE);
608if (pvbuf)
609{
610KeAcquireSpinLock(&m_SpinLock, &OldIrql);
611InsertTailList(&m_FreeBufs, &pvbuf->list_entry);
612++m_uCount;
613KeReleaseSpinLock(&m_SpinLock, OldIrql);
614}
615}
616ASSERT(m_uCount == cnt);
617
618DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
619
620return (m_uCount > 0);
621}
622
// Drain both pool lists under the spinlock and free every vbuffer created
// by Init(). In-use buffers are expected to be gone by teardown time.
void VioGpuBuf::Close(void)
{
    KIRQL OldIrql;

    DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));

    KeAcquireSpinLock(&m_SpinLock, &OldIrql);
    while (!IsListEmpty(&m_InUseBufs))
    {
        LIST_ENTRY* pListItem = RemoveHeadList(&m_InUseBufs);
        if (pListItem)
        {
            PGPU_VBUFFER pvbuf = CONTAINING_RECORD(pListItem, GPU_VBUFFER, list_entry);
            ASSERT(pvbuf);
            // In-use buffers are expected to use only the inline response
            // area; an out-of-line resp_buf here would be leaked, since this
            // path frees just the vbuffer itself.
            ASSERT(pvbuf->resp_size <= MAX_INLINE_RESP_SIZE);

            delete[] reinterpret_cast<PBYTE>(pvbuf);
            --m_uCount;
        }
    }

    while (!IsListEmpty(&m_FreeBufs))
    {
        LIST_ENTRY* pListItem = RemoveHeadList(&m_FreeBufs);
        if (pListItem)
        {
            PGPU_VBUFFER pbuf = CONTAINING_RECORD(pListItem, GPU_VBUFFER, list_entry);
            ASSERT(pbuf);

            // Free-list entries may still own an out-of-line response buffer
            // (resp_size > MAX_INLINE_RESP_SIZE)...
            if (pbuf->resp_buf && pbuf->resp_size > MAX_INLINE_RESP_SIZE)
            {
                delete[] reinterpret_cast<PBYTE>(pbuf->resp_buf);
                pbuf->resp_buf = NULL;
                pbuf->resp_size = 0;
            }

            // ...and/or a data payload buffer; both are released here.
            if (pbuf->data_buf && pbuf->data_size)
            {
                delete[] reinterpret_cast<PBYTE>(pbuf->data_buf);
                pbuf->data_buf = NULL;
                pbuf->data_size = 0;
            }

            delete[] reinterpret_cast<PBYTE>(pbuf);
            --m_uCount;
        }
    }
    KeReleaseSpinLock(&m_SpinLock, OldIrql);

    // Every buffer counted by Init() must have been accounted for above.
    ASSERT(m_uCount == 0);

    DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
}
676
677PGPU_VBUFFER VioGpuBuf::GetBuf(
678_In_ int size,
679_In_ int resp_size,
680_In_opt_ void *resp_buf)
681{
682
683DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
684
685PGPU_VBUFFER pbuf = NULL;
686PLIST_ENTRY pListItem = NULL;
687KIRQL SavedIrql = KeGetCurrentIrql();
688
689if (SavedIrql < DISPATCH_LEVEL) {
690KeAcquireSpinLock(&m_SpinLock, &SavedIrql);
691}
692else if (SavedIrql == DISPATCH_LEVEL) {
693KeAcquireSpinLockAtDpcLevel(&m_SpinLock);
694}
695else {
696VioGpuDbgBreak();
697}
698
699if (!IsListEmpty(&m_FreeBufs))
700{
701pListItem = RemoveHeadList(&m_FreeBufs);
702pbuf = CONTAINING_RECORD(pListItem, GPU_VBUFFER, list_entry);
703ASSERT(pvbuf);
704memset(pbuf, 0, VBUFFER_SIZE);
705ASSERT(size > MAX_INLINE_CMD_SIZE);
706
707pbuf->buf = (char *)((ULONG_PTR)pbuf + sizeof(*pbuf));
708pbuf->size = size;
709
710pbuf->resp_size = resp_size;
711if (resp_size <= MAX_INLINE_RESP_SIZE)
712{
713pbuf->resp_buf = (char *)((ULONG_PTR)pbuf->buf + size);
714}
715else
716{
717pbuf->resp_buf = (char *)resp_buf;
718}
719ASSERT(vbuf->resp_buf);
720InsertTailList(&m_InUseBufs, &pbuf->list_entry);
721}
722else
723{
724DbgPrint(TRACE_LEVEL_FATAL, ("<--- %s Cannot allocate buffer\n", __FUNCTION__));
725VioGpuDbgBreak();
726}
727if (SavedIrql < DISPATCH_LEVEL) {
728KeReleaseSpinLock(&m_SpinLock, SavedIrql);
729}
730else if (SavedIrql == DISPATCH_LEVEL) {
731KeReleaseSpinLockFromDpcLevel(&m_SpinLock);
732}
733else {
734VioGpuDbgBreak();
735}
736
737DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s buf = %p\n", __FUNCTION__, pbuf));
738
739return pbuf;
740}
741
// Return 'pbuf' to the free list: unlink it from the in-use list (linear
// search), release any out-of-line response/data allocations, and park the
// buffer on m_FreeBufs for reuse.
void VioGpuBuf::FreeBuf(
    _In_ PGPU_VBUFFER pbuf)
{
    KIRQL OldIrql;

    DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s buf = %p\n", __FUNCTION__, pbuf));
    KeAcquireSpinLock(&m_SpinLock, &OldIrql);

    if (!IsListEmpty(&m_InUseBufs))
    {
        // Walk the in-use list looking for the entry matching pbuf.
        PLIST_ENTRY leCurrent = m_InUseBufs.Flink;
        PGPU_VBUFFER pvbuf = CONTAINING_RECORD(leCurrent, GPU_VBUFFER, list_entry);
        while (leCurrent && pvbuf)
        {
            if (pvbuf == pbuf)
            {
                RemoveEntryList(leCurrent);
                pvbuf = NULL;  // found -- stop scanning
                break;
            }

            leCurrent = leCurrent->Flink;
            if (leCurrent) {
                pvbuf = CONTAINING_RECORD(leCurrent, GPU_VBUFFER, list_entry);
            }
        }
        // NOTE(review): LIST_ENTRY lists are circular, so this walk relies on
        // pbuf always being on the in-use list; if it ever were not, the loop
        // would cycle through the head sentinel indefinitely -- confirm
        // callers only pass buffers obtained from GetBuf().
    }
    if (pbuf->resp_buf && pbuf->resp_size > MAX_INLINE_RESP_SIZE)
    {
        // Out-of-line response buffer was allocated by the caller; free it.
        delete[] reinterpret_cast<PBYTE>(pbuf->resp_buf);
        pbuf->resp_buf = NULL;
        pbuf->resp_size = 0;
    }

    if (pbuf->data_buf && pbuf->data_size)
    {
        // Data payload (e.g. the GPU_MEM_ENTRY array from AttachBacking).
        delete[] reinterpret_cast<PBYTE>(pbuf->data_buf);
        pbuf->data_buf = NULL;
        pbuf->data_size = 0;
    }

    InsertTailList(&m_FreeBufs, &pbuf->list_entry);
    KeReleaseSpinLock(&m_SpinLock, OldIrql);

    DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
}
788
789PAGED_CODE_SEG_BEGIN
790VioGpuBuf::VioGpuBuf()
791{
792PAGED_CODE();
793
794DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
795
796InitializeListHead(&m_FreeBufs);
797InitializeListHead(&m_InUseBufs);
798KeInitializeSpinLock(&m_SpinLock);
799m_uCount = 0;
800
801DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
802}
803
// Destructor: frees every pooled buffer via Close().
// NOTE(review): traces at TRACE_LEVEL_FATAL, presumably to make pool
// teardown visible in release logs -- confirm this level is intentional.
VioGpuBuf::~VioGpuBuf()
{
    PAGED_CODE();

    DbgPrint(TRACE_LEVEL_FATAL, ("---> %s 0x%p\n", __FUNCTION__, this));

    Close();

    DbgPrint(TRACE_LEVEL_FATAL, ("<--- %s\n", __FUNCTION__));
}
814
815VioGpuMemSegment::VioGpuMemSegment(void)
816{
817PAGED_CODE();
818
819DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
820
821m_pSGList = NULL;
822m_pVAddr = NULL;
823m_pMdl = NULL;
824m_bSystemMemory = FALSE;
825m_bMapped = FALSE;
826m_Size = 0;
827DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
828}
829
// Destructor: releases the MDL, backing memory and SG list via Close().
VioGpuMemSegment::~VioGpuMemSegment(void)
{
    PAGED_CODE();

    DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));

    Close();

    DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
}
840
841BOOLEAN VioGpuMemSegment::Init(_In_ UINT size, _In_opt_ PPHYSICAL_ADDRESS pPAddr)
842{
843PAGED_CODE();
844
845DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
846
847ASSERT(size);
848PVOID buf = NULL;
849UINT pages = BYTES_TO_PAGES(size);
850UINT sglsize = sizeof(SCATTER_GATHER_LIST) + (sizeof(SCATTER_GATHER_ELEMENT) * pages);
851size = pages * PAGE_SIZE;
852
853if ((pPAddr == NULL) ||
854pPAddr->QuadPart == 0LL) {
855m_pVAddr = new (NonPagedPoolNx) BYTE[size];
856
857if (!m_pVAddr)
858{
859DbgPrint(TRACE_LEVEL_FATAL, ("%s insufficient resources to allocate %x bytes\n", __FUNCTION__, size));
860return FALSE;
861}
862RtlZeroMemory(m_pVAddr, size);
863m_bSystemMemory = TRUE;
864}
865else {
866NTSTATUS Status = MapFrameBuffer(*pPAddr, size, &m_pVAddr);
867if (!NT_SUCCESS(Status)) {
868DbgPrint(TRACE_LEVEL_FATAL, ("<--- %s MapFrameBuffer failed with Status: 0x%X\n", __FUNCTION__, Status));
869return FALSE;
870}
871m_bMapped = TRUE;
872}
873
874m_pMdl = IoAllocateMdl(m_pVAddr, size, FALSE, FALSE, NULL);
875if (!m_pMdl)
876{
877DbgPrint(TRACE_LEVEL_FATAL, ("%s insufficient resources to allocate MDLs\n", __FUNCTION__));
878return FALSE;
879}
880if (m_bSystemMemory == TRUE) {
881__try
882{
883MmProbeAndLockPages(m_pMdl, KernelMode, IoWriteAccess);
884}
885#pragma prefast(suppress: __WARNING_EXCEPTIONEXECUTEHANDLER, "try/except is only able to protect against user-mode errors and these are the only errors we try to catch here");
886__except (EXCEPTION_EXECUTE_HANDLER)
887{
888DbgPrint(TRACE_LEVEL_FATAL, ("%s Failed to lock pages with error %x\n", __FUNCTION__, GetExceptionCode()));
889IoFreeMdl(m_pMdl);
890return FALSE;
891}
892}
893m_pSGList = reinterpret_cast<PSCATTER_GATHER_LIST>(new (NonPagedPoolNx) BYTE[sglsize]);
894m_pSGList->NumberOfElements = 0;
895m_pSGList->Reserved = 0;
896// m_pSAddr = reinterpret_cast<BYTE*>
897// (MmGetSystemAddressForMdlSafe(m_pMdl, NormalPagePriority | MdlMappingNoExecute));
898
899RtlZeroMemory(m_pSGList, sglsize);
900buf = PAGE_ALIGN(m_pVAddr);
901
902for (UINT i = 0; i < pages; ++i)
903{
904PHYSICAL_ADDRESS pa = { 0 };
905ASSERT(MmIsAddressValid(buf));
906pa = MmGetPhysicalAddress(buf);
907if (pa.QuadPart == 0LL)
908{
909DbgPrint(TRACE_LEVEL_FATAL, ("%s Invalid PA buf = %p element %d\n", __FUNCTION__, buf, i));
910break;
911}
912m_pSGList->Elements[i].Address = pa;
913m_pSGList->Elements[i].Length = PAGE_SIZE;
914buf = (PVOID)((LONG_PTR)(buf)+PAGE_SIZE);
915m_pSGList->NumberOfElements++;
916}
917m_Size = size;
918DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
919
920return TRUE;
921}
922
923PHYSICAL_ADDRESS VioGpuMemSegment::GetPhysicalAddress(void)
924{
925PAGED_CODE();
926
927DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
928
929PHYSICAL_ADDRESS pa = { 0 };
930if (m_pVAddr && MmIsAddressValid(m_pVAddr))
931{
932pa = MmGetPhysicalAddress(m_pVAddr);
933}
934
935DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
936
937return pa;
938}
939
940void VioGpuMemSegment::Close(void)
941{
942PAGED_CODE();
943
944DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
945
946if (m_pMdl)
947{
948if (m_bSystemMemory) {
949MmUnlockPages(m_pMdl);
950}
951IoFreeMdl(m_pMdl);
952m_pMdl = NULL;
953}
954
955if (m_bSystemMemory) {
956delete[] m_pVAddr;
957}
958else {
959UnmapFrameBuffer(m_pVAddr, (ULONG)m_Size);
960m_bMapped = FALSE;
961}
962m_pVAddr = NULL;
963
964delete[] reinterpret_cast<PBYTE>(m_pSGList);
965m_pSGList = NULL;
966
967DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
968}
969
970
971VioGpuObj::VioGpuObj(void)
972{
973PAGED_CODE();
974
975DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
976
977m_uiHwRes = 0;
978m_pSegment = NULL;
979m_Size = 0;
980
981DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
982}
983
// Destructor. m_pSegment is not freed here -- presumably the segment is
// owned by the caller that passed it to Init(); confirm against callers.
VioGpuObj::~VioGpuObj(void)
{
    PAGED_CODE();

    DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));

    DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s\n", __FUNCTION__));
}
992
993BOOLEAN VioGpuObj::Init(_In_ UINT size, VioGpuMemSegment *pSegment)
994{
995PAGED_CODE();
996
997DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s requested size = %d\n", __FUNCTION__, size));
998
999ASSERT(size);
1000ASSERT(pSegment);
1001UINT pages = BYTES_TO_PAGES(size);
1002size = pages * PAGE_SIZE;
1003if (size > pSegment->GetSize())
1004{
1005DbgPrint(TRACE_LEVEL_FATAL, ("<--- %s segment size too small = %Iu (%u)\n", __FUNCTION__, m_pSegment->GetSize(), size));
1006return FALSE;
1007}
1008m_pSegment = pSegment;
1009m_Size = size;
1010DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s size = %Iu\n", __FUNCTION__, m_Size));
1011return TRUE;
1012}
1013
1014PVOID CrsrQueue::AllocCursor(PGPU_VBUFFER* buf)
1015{
1016PAGED_CODE();
1017
1018DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
1019
1020PGPU_VBUFFER vbuf;
1021vbuf = m_pBuf->GetBuf(sizeof(GPU_UPDATE_CURSOR), 0, NULL);
1022ASSERT(vbuf);
1023*buf = vbuf;
1024
1025DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s vbuf = %p\n", __FUNCTION__, vbuf));
1026
1027return vbuf ? vbuf->buf : NULL;
1028}
1029
1030PAGED_CODE_SEG_END
1031
1032UINT CrsrQueue::QueueCursor(PGPU_VBUFFER buf)
1033{
1034// PAGED_CODE();
1035
1036DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
1037
1038UINT res = 0;
1039KIRQL SavedIrql;
1040
1041VirtIOBufferDescriptor sg[1];
1042int outcnt = 0;
1043UINT ret = 0;
1044
1045ASSERT(buf->size <= PAGE_SIZE);
1046if (BuildSGElement(&sg[outcnt], (PVOID)buf->buf, buf->size))
1047{
1048outcnt++;
1049}
1050
1051ASSERT(outcnt);
1052Lock(&SavedIrql);
1053ret = AddBuf(&sg[0], outcnt, 0, buf, NULL, 0);
1054Kick();
1055Unlock(SavedIrql);
1056
1057DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s vbuf = %p outcnt = %d, ret = %d\n", __FUNCTION__, buf, outcnt, ret));
1058return res;
1059}
1060
1061PGPU_VBUFFER CrsrQueue::DequeueCursor(_Out_ UINT* len)
1062{
1063DbgPrint(TRACE_LEVEL_VERBOSE, ("---> %s\n", __FUNCTION__));
1064
1065PGPU_VBUFFER buf = NULL;
1066KIRQL SavedIrql;
1067Lock(&SavedIrql);
1068buf = (PGPU_VBUFFER)GetBuf(len);
1069Unlock(SavedIrql);
1070if (buf == NULL)
1071{
1072*len = 0;
1073}
1074DbgPrint(TRACE_LEVEL_VERBOSE, ("<--- %s buf %p len = %u\n", __FUNCTION__, buf, *len));
1075return buf;
1076}
1077