After setting tree node height, treeview scrolls incorrectly - C#

public Form2()
{
InitializeComponent();
}
private void Form2_Load(object sender, EventArgs e)
{
Dictionary<int, TreeNode> map = new Dictionary<int, TreeNode>();
for (int i = 0; i < 100; i++)
{
TreeNode node = new TreeNode(i.ToString());
if (i % 10 == 0)
_tree.Nodes.Add(node);
map.Add(i, node);
if (i % 10 != 0)
{
map[Convert.ToInt32(Math.Floor(i / 10.0)) * 10].Nodes.Add(node);
node.SetNodeHeight(2);
}
}
}
}
public static class TreeViewExtendMethod
{
[StructLayout(LayoutKind.Sequential)]
struct TVITEMEX
{
public Mask mask;
public IntPtr item;
public uint state;
public uint stateMask;
public IntPtr pszText;
public int cchTextMax;
public int iImage;
public int iSelectedImage;
public int iChildren;
public IntPtr lParam;
public int iIntegral;
// ... plus some windows 6+ crap
}
[Flags]
enum Mask : uint
{
Text = 1,
Image = 2,
Param = 4,
State = 8,
Handle = 16,
SelectedImage = 32,
Children = 64,
Integral = 128,
}
public static int GetNodeHeight(this TreeNode tn)
{
TVITEMEX tvix = new TVITEMEX();
tvix.mask = Mask.Handle | Mask.Integral;
tvix.item = tn.Handle;
tvix.iIntegral = 0;
IntPtr hParam = Marshal.AllocHGlobal(Marshal.SizeOf(tvix));
Marshal.StructureToPtr(tvix, hParam, false);
win32.SendMessage(tn.TreeView.Handle, win32.TVM_GETITEM, IntPtr.Zero, hParam);
// copy the data the control wrote back into the struct, then free the unmanaged buffer
tvix = (TVITEMEX)Marshal.PtrToStructure(hParam, typeof(TVITEMEX));
Marshal.FreeHGlobal(hParam);
return tvix.iIntegral;
}
public static void SetNodeHeight(this TreeNode tn, int height)
{
TVITEMEX tvix = new TVITEMEX();
tvix.mask = Mask.Handle | Mask.Integral;
tvix.item = tn.Handle;
tvix.iIntegral = height;
IntPtr hParam = Marshal.AllocHGlobal(Marshal.SizeOf(tvix));
Marshal.StructureToPtr(tvix, hParam, false);
win32.SendMessage(tn.TreeView.Handle, win32.TVM_SETITEM, IntPtr.Zero, hParam);
// release the unmanaged buffer once the message has been sent
Marshal.FreeHGlobal(hParam);
}
}
The above is the sample code. After using P/Invoke to change the node height manually, clicking at the bottom of the scroll bar makes the treeview scroll, even though a plain click should not scroll it at all.
At first I thought it was caused by the scroll bar itself, but it still didn't work as expected after I set the scroll message parameter to 8 (end scroll). In the end, when I catch the scroll message (0x115) and exit WndProc without passing it on, it works. I have no idea what is going on, as I can't find any message that can cancel the scroll other than setting the wParam to 8, which certainly does not work.
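For reference, WM_VSCROLL (0x115) packs its wParam so that the low word is the scroll request code and the high word is the track position; that is what the magic numbers 4, 5 and 8 and the >> 16 in the solution below refer to. A minimal decoding sketch (the SB_* names are the standard Win32 constants, not something defined in my project):
// Decode the wParam of a WM_VSCROLL (0x115) message.
const int SB_THUMBPOSITION = 4; // thumb released at the reported position
const int SB_THUMBTRACK = 5;    // thumb is being dragged
const int SB_ENDSCROLL = 8;     // the scroll action has finished
static void DecodeVScroll(IntPtr wParam, out int request, out int trackPos)
{
request = (int)(wParam.ToInt64() & 0xFFFF);          // low word: SB_* request code
trackPos = (int)((wParam.ToInt64() >> 16) & 0xFFFF); // high word: position (valid for 4 and 5)
}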

I finally found a solution; it is not elegant, but it works.
Here is the solution:
protected override void WndProc(ref Message m)
{
if (m.Msg == 0x115)
{
if (((int)m.WParam & 5) == (5) ||
((int)m.WParam & 4) == (4)
)
{
int para = (int)m.WParam;
int temp = (para - (para % 2 == 0 ? 4 : 5)) >> 16;
// store the current position, if the position doesn't change, no need to handle
if (temp == pos)
return;
else
pos = temp;
}
if (((int)m.WParam & 8) == 8)
return;
//Debug.WriteLine(m.WParam);
//if (DisableScroll)
// return;
// //m.WParam = (IntPtr)8;
//SCROLLINFO si = new SCROLLINFO();
//si.cbSize = Marshal.SizeOf(si);
//si.fMask = (int)(ScrollInfoMask.SIF_POS | ScrollInfoMask.SIF_PAGE | ScrollInfoMask.SIF_RANGE | ScrollInfoMask.SIF_TRACKPOS);
//GetScrollInfo(this.Handle, 1, ref si);
////Debug.WriteLine(string.Format("Max pos:{0}, Min pos: {1}", si.nMin, si.nMax));
//Debug.WriteLine(string.Format("wParam:{0} hParam:{1}", m.WParam, m.LParam));
//Debug.WriteLine(string.Format("It page size: {0}, position: {1}, range: {2}, track pos: {3}", si.nPage, si.nPos, si.nPage, si.nTrackPos));
//if (si.nPage + Math.Max(si.nPos, si.nTrackPos) >= si.nMax)
//{
// return;
//}
}
base.WndProc(ref m);
}


How to get drive letter of usb drive from vendor/product id?

I only know the Vendor and Product ID of a USB drive, but I need to be able to get all drive letters associated with this device.
System.IO.DriveInfo doesn't give any IDs with which I can find the vid/pid.
LibUsb wrappers have the opposite problem -- tons of id information but nothing that I can connect to a mount point.
There was a related post several years ago.
I adapted the code from the answer:
static void Main(string[] args)
{
// example call
string device = FindPath("VID_0781&PID_5583");
GetDriveLetter(device);
}
static public string FindPath(string pattern)
{
var USBobjects = new List<string>();
string Entity = "*none*";
foreach (ManagementObject entity in new ManagementObjectSearcher(
$"select * from Win32_USBHub Where DeviceID Like '%{pattern}%'").Get())
{
Entity = entity["DeviceID"].ToString();
foreach (ManagementObject controller in entity.GetRelated("Win32_USBController"))
{
foreach (ManagementObject obj in new ManagementObjectSearcher(
"ASSOCIATORS OF {Win32_USBController.DeviceID='"
+ controller["PNPDeviceID"].ToString() + "'}").Get())
{
if (obj.ToString().Contains("DeviceID"))
USBobjects.Add(obj["DeviceID"].ToString());
}
}
}
int VidPidposition = USBobjects.IndexOf(Entity);
if (VidPidposition < 0)
return "*none*"; // the pattern was not found among the hub devices
for (int i = VidPidposition; i < USBobjects.Count; i++)
{
if (USBobjects[i].Contains("USBSTOR"))
{
return USBobjects[i];
}
}
return "*none*";
}
public static void GetDriveLetter(string device)
{
int driveCount = 0;
foreach (ManagementObject drive in new ManagementObjectSearcher("select * from Win32_DiskDrive").Get())
{
if (drive["PNPDeviceID"].ToString() == device)
{
foreach (ManagementObject o in drive.GetRelated("Win32_DiskPartition"))
{
foreach (ManagementObject i in o.GetRelated("Win32_LogicalDisk"))
{
Console.WriteLine("Disk: " + i["Name"].ToString());
driveCount++;
}
}
}
}
if (driveCount == 0)
{
Console.WriteLine("No drive identified!");
}
}
Some error handling and optimization would make sense.
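For example, a minimal sketch of what that error handling might look like around the example call (ManagementException is the exception WMI queries typically throw; nothing here is from the original answer):
// Sketch only: basic error handling around the lookup shown above.
try
{
string device = FindPath("VID_0781&PID_5583");
if (device == "*none*")
Console.WriteLine("No matching USB storage device found.");
else
GetDriveLetter(device);
}
catch (System.Management.ManagementException ex)
{
Console.WriteLine("WMI query failed: " + ex.Message);
}
catch (UnauthorizedAccessException ex)
{
Console.WriteLine("Access denied: " + ex.Message);
}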
Here is a solution that works for me, giving you all drives matching your vendor and product ids. Not all the code was written by me.
#include <windows.h>
#include <stdio.h>
#include <setupapi.h>
#include <cfgmgr32.h>
#include <winioctl.h>
#define BUFFER_SIZE 256
#define MAX_DRIVES 26
// Finds the device instance for the disk drive with the given device number.
DEVINST GetDrivesDevInstByDeviceNumber(long DeviceNumber)
{
const GUID *guid = &GUID_DEVINTERFACE_DISK;
// Get device interface info set handle
// for all devices attached to system
HDEVINFO hDevInfo = SetupDiGetClassDevs(guid, NULL, NULL, DIGCF_PRESENT | DIGCF_DEVICEINTERFACE);
if (hDevInfo == INVALID_HANDLE_VALUE)
return 0;
// Retrieve a context structure for a device interface of a device information set.
BYTE buf[1024];
PSP_DEVICE_INTERFACE_DETAIL_DATA pspdidd = (PSP_DEVICE_INTERFACE_DETAIL_DATA)buf;
SP_DEVICE_INTERFACE_DATA spdid;
SP_DEVINFO_DATA spdd;
DWORD dwSize;
spdid.cbSize = sizeof(spdid);
// Iterate through all the interfaces and try to match one based on
// the device number.
for (DWORD i = 0; SetupDiEnumDeviceInterfaces(hDevInfo, NULL, guid, i, &spdid); i++)
{
// Get the device path.
dwSize = 0;
SetupDiGetDeviceInterfaceDetail(hDevInfo, &spdid, NULL, 0, &dwSize, NULL);
if (dwSize == 0 || dwSize > sizeof(buf))
continue;
pspdidd->cbSize = sizeof(*pspdidd);
ZeroMemory((PVOID)&spdd, sizeof(spdd));
spdd.cbSize = sizeof(spdd);
if (!SetupDiGetDeviceInterfaceDetail(hDevInfo, &spdid, pspdidd,
dwSize, &dwSize, &spdd))
continue;
// Open the device.
HANDLE hDrive = CreateFile(pspdidd->DevicePath, 0,
FILE_SHARE_READ | FILE_SHARE_WRITE,
NULL, OPEN_EXISTING, 0, NULL);
if (hDrive == INVALID_HANDLE_VALUE)
continue;
// Get the device number.
STORAGE_DEVICE_NUMBER sdn;
dwSize = 0;
if (DeviceIoControl(hDrive,
IOCTL_STORAGE_GET_DEVICE_NUMBER,
NULL, 0, &sdn, sizeof(sdn),
&dwSize, NULL))
{
// Does it match?
if (DeviceNumber == (long)sdn.DeviceNumber)
{
CloseHandle(hDrive);
SetupDiDestroyDeviceInfoList(hDevInfo);
return spdd.DevInst;
}
}
CloseHandle(hDrive);
}
SetupDiDestroyDeviceInfoList(hDevInfo);
return 0;
}
// Returns true if the given device instance belongs to the USB device with the given VID and PID.
boolean matchDevInstToUsbDevice(DEVINST device, DWORD vid, DWORD pid)
{
// This is the string we will be searching for in the device hardware IDs.
TCHAR hwid[64];
sprintf(hwid, "VID_%04X&PID_%04X", vid, pid);
// Get a list of hardware IDs for all USB devices.
ULONG ulLen;
CM_Get_Device_ID_List_Size(&ulLen, NULL, CM_GETIDLIST_FILTER_NONE);
TCHAR *pszBuffer = malloc(sizeof(TCHAR) * ulLen);
CM_Get_Device_ID_List(NULL, pszBuffer, ulLen, CM_GETIDLIST_FILTER_NONE);
// Iterate through the list looking for our ID.
for (LPTSTR pszDeviceID = pszBuffer; *pszDeviceID; pszDeviceID += _tcslen(pszDeviceID) + 1)
{
// Some versions of Windows have the string in upper case and other versions have it
// in lower case so just make it all upper.
for (int i = 0; pszDeviceID[i]; i++)
pszDeviceID[i] = toupper(pszDeviceID[i]);
if (_tcsstr(pszDeviceID, hwid))
{
// Found the device, now we want the grandchild device, which is the "generic volume"
DEVINST MSDInst = 0;
if (CR_SUCCESS == CM_Locate_DevNode(&MSDInst, pszDeviceID, CM_LOCATE_DEVNODE_NORMAL))
{
DEVINST DiskDriveInst = 0;
if (CR_SUCCESS == CM_Get_Child(&DiskDriveInst, MSDInst, 0))
{
// Now compare the grandchild node against the given device instance.
if (device == DiskDriveInst)
return TRUE;
}
}
}
}
return FALSE;
}
int scan_drives(DWORD vid, DWORD pid)
{
TCHAR caDrive[4] = TEXT("A:\\");
TCHAR volume[BUFFER_SIZE];
TCHAR volume_path_name[BUFFER_SIZE];
DWORD dwDriveMask;
int count = 0;
// Get all drives in the system.
dwDriveMask = GetLogicalDrives();
if (dwDriveMask == 0)
{
printf("Error - GetLogicalDrives failed\n");
return -1;
}
// Loop for all drives.
for (int nLoopIndex = 0; nLoopIndex < MAX_DRIVES; nLoopIndex++, dwDriveMask >>= 1)
{
// If a drive is present,
if (dwDriveMask & 1)
{
caDrive[0] = TEXT('A') + nLoopIndex;
// If a drive is removable.
if (GetDriveType(caDrive) == DRIVE_REMOVABLE)
{
//Get its volume info.
if (GetVolumeNameForVolumeMountPoint(caDrive, volume, BUFFER_SIZE))
{
DWORD lpcchReturnLength;
GetVolumePathNamesForVolumeName(volume, volume_path_name, BUFFER_SIZE, &lpcchReturnLength);
char szVolumeAccessPath[] = "\\\\.\\X:";
szVolumeAccessPath[4] = caDrive[0];
long DeviceNumber = -1;
HANDLE hVolume = CreateFile(szVolumeAccessPath, 0, FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_EXISTING, 0, NULL);
if (hVolume == INVALID_HANDLE_VALUE)
{
return 1;
}
STORAGE_DEVICE_NUMBER sdn;
DWORD dwBytesReturned = 0;
long res = DeviceIoControl(hVolume, IOCTL_STORAGE_GET_DEVICE_NUMBER, NULL, 0, &sdn, sizeof(sdn), &dwBytesReturned, NULL);
if (res)
{
DeviceNumber = sdn.DeviceNumber;
}
CloseHandle(hVolume);
if (DeviceNumber != -1)
{
DEVINST DevInst = GetDrivesDevInstByDeviceNumber(DeviceNumber);
boolean match = matchDevInstToUsbDevice(DevInst, vid, pid);
if (match)
{
printf("%s\t0x%04x\t0x%04x\n", volume_path_name, vid, pid);
}
}
count++;
}
}
}
}
return count;
}
int main(int argc, char *argv[])
{
if (argc > 1)
{
if (argc % 2 == 0)
{
return -1;
}
for (int i = 1; i < argc - 1; i += 2)
{
int vid, pid;
sscanf(argv[i], "%x", &vid);
sscanf(argv[i + 1], "%x", &pid);
scan_drives(vid, pid);
}
}
return 0;
}

FFmpeg.swr_convert: audio to raw 16 bit pcm, to be used with xna SoundEffect. Audio cuts out when I convert

I want to resample mkv (vp8/ogg) and also raw 4-bit ADPCM to a raw 16-bit PCM byte[] to be loaded into SoundEffect from the XNA library, so I can play it while I'm using other code to display the frames (the video side is working).
I can read a 16-bit wav file and play it. But when I go to resample something, it doesn't play fully. One file is 3 minutes and 15 seconds long, but I only get 13 seconds and 739 ms before it quits playing. I have been learning to do this by finding code samples in C++ and correcting them to work in C# using FFmpeg.AutoGen.
The code below is my best attempt at resampling.
int nb_samples = Frame->nb_samples;
int output_nb_samples = nb_samples;
int nb_channels = ffmpeg.av_get_channel_layout_nb_channels(ffmpeg.AV_CH_LAYOUT_STEREO);
int bytes_per_sample = ffmpeg.av_get_bytes_per_sample(AVSampleFormat.AV_SAMPLE_FMT_S16) * nb_channels;
int bufsize = ffmpeg.av_samples_get_buffer_size(null, nb_channels, nb_samples,
AVSampleFormat.AV_SAMPLE_FMT_S16, 1);
byte*[] b = Frame->data;
fixed (byte** input = b)
{
byte* output = null;
ffmpeg.av_samples_alloc(&output, null,
nb_channels,
nb_samples,
(AVSampleFormat)Frame->format, 0);//
// Buffer input
Ret = ffmpeg.swr_convert(Swr, &output, output_nb_samples / 2, input, nb_samples);
CheckRet();
WritetoMs(output, 0, Ret * bytes_per_sample);
output_nb_samples -= Ret;
// Drain buffer
while ((Ret = ffmpeg.swr_convert(Swr, &output, output_nb_samples, null, 0)) > 0)
{
CheckRet();
WritetoMs(output, 0, Ret * bytes_per_sample);
output_nb_samples -= Ret;
}
}
I changed all of that to the following, but it cuts off even sooner.
Channels = ffmpeg.av_get_channel_layout_nb_channels(OutFrame->channel_layout);
int nb_channels = ffmpeg.av_get_channel_layout_nb_channels(ffmpeg.AV_CH_LAYOUT_STEREO);
int bytes_per_sample = ffmpeg.av_get_bytes_per_sample(AVSampleFormat.AV_SAMPLE_FMT_S16) * nb_channels;
if((Ret = ffmpeg.swr_convert_frame(Swr, OutFrame, Frame))>=0)
WritetoMs(*OutFrame->extended_data, 0, OutFrame->nb_samples * bytes_per_sample);
CheckRet();
Both versions use a function to set up Swr; it runs once, after the first frame is decoded.
private void PrepareResampler()
{
ffmpeg.av_frame_copy_props(OutFrame, Frame);
OutFrame->channel_layout = ffmpeg.AV_CH_LAYOUT_STEREO;
OutFrame->format = (int)AVSampleFormat.AV_SAMPLE_FMT_S16;
OutFrame->sample_rate = Frame->sample_rate;
OutFrame->channels = 2;
Swr = ffmpeg.swr_alloc();
if (Swr == null)
throw new Exception("SWR = Null");
Ret = ffmpeg.swr_config_frame(Swr, OutFrame, Frame);
CheckRet();
Ret = ffmpeg.swr_init(Swr);
CheckRet();
Ret = ffmpeg.swr_is_initialized(Swr);
CheckRet();
}
This is where I take the output and put it into the SoundEffect:
private void ReadAll()
{
using (Ms = new MemoryStream())
{
while (true)
{
Ret = ffmpeg.av_read_frame(Format, Packet);
if (Ret == ffmpeg.AVERROR_EOF)
break;
CheckRet();
Decode();
}
if (Ms.Length > 0)
{
se = new SoundEffect(Ms.ToArray(), 0, (int)Ms.Length, OutFrame->sample_rate, (AudioChannels)Channels, 0, 0);
//se.Duration; Stream->duration;
see = se.CreateInstance();
see.Play();
}
}
}
I found some code in C++ FFmpeg distorted sound when converting audio and adapted it to C#. I slowly tried bits of it in my program, and it turned out my decoder was doing something wrong, so all the attempts at resampling or encoding were going to fail.
using FFmpeg.AutoGen;
using System;
using System.IO;
namespace ConsoleApp1
{
//adapted using code from https://stackoverflow.com/questions/32051847/c-ffmpeg-distorted-sound-when-converting-audio?rq=1
public unsafe class Program
{
public static AVStream* in_audioStream { get; private set; }
static unsafe void die(string str)
{
throw new Exception(str);
}
private static unsafe AVStream* add_audio_stream(AVFormatContext* oc, AVCodecID codec_id, int sample_rate = 44100)
{
AVCodecContext* c;
AVCodec* encoder = ffmpeg.avcodec_find_encoder(codec_id);
AVStream* st = ffmpeg.avformat_new_stream(oc, encoder);
if (st == null)
{
die("av_new_stream");
}
c = st->codec;
c->codec_id = codec_id;
c->codec_type = AVMediaType.AVMEDIA_TYPE_AUDIO;
/* put sample parameters */
c->bit_rate = 64000;
c->sample_rate = sample_rate;
c->channels = 2;
c->sample_fmt = encoder->sample_fmts[0];
c->channel_layout = ffmpeg.AV_CH_LAYOUT_STEREO;
// some formats want stream headers to be separate
if ((oc->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
{
c->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
}
return st;
}
private static unsafe void open_audio(AVFormatContext* oc, AVStream* st)
{
AVCodecContext* c = st->codec;
AVCodec* codec;
/* find the audio encoder */
codec = ffmpeg.avcodec_find_encoder(c->codec_id);
if (codec == null)
{
die("avcodec_find_encoder");
}
/* open it */
AVDictionary* dict = null;
ffmpeg.av_dict_set(&dict, "strict", "+experimental", 0);
int res = ffmpeg.avcodec_open2(c, codec, &dict);
if (res < 0)
{
die("avcodec_open");
}
}
public static int DecodeNext(AVCodecContext* avctx, AVFrame* frame, ref int got_frame_ptr, AVPacket* avpkt)
{
int ret = 0;
got_frame_ptr = 0;
if ((ret = ffmpeg.avcodec_receive_frame(avctx, frame)) == 0)
{
//0 on success, otherwise negative error code
got_frame_ptr = 1;
}
else if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN))
{
//AVERROR(EAGAIN): input is not accepted in the current state - user must read output with avcodec_receive_packet()
//(once all output is read, the packet should be resent, and the call will not fail with EAGAIN)
ret = Decode(avctx, frame, ref got_frame_ptr, avpkt);
}
else if (ret == ffmpeg.AVERROR_EOF)
{
die("AVERROR_EOF: the encoder has been flushed, and no new frames can be sent to it");
}
else if (ret == ffmpeg.AVERROR(ffmpeg.EINVAL))
{
die("AVERROR(EINVAL): codec not opened, refcounted_frames not set, it is a decoder, or requires flush");
}
else if (ret == ffmpeg.AVERROR(ffmpeg.ENOMEM))
{
die("Failed to add packet to internal queue, or similar other errors: legitimate decoding errors");
}
else
{
die("unknown");
}
return ret;
}
public static int Decode(AVCodecContext* avctx, AVFrame* frame, ref int got_frame_ptr, AVPacket* avpkt)
{
int ret = 0;
got_frame_ptr = 0;
if ((ret = ffmpeg.avcodec_send_packet(avctx, avpkt)) == 0)
{
//0 on success, otherwise negative error code
return DecodeNext(avctx, frame, ref got_frame_ptr, avpkt);
}
else if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN))
{
die("input is not accepted in the current state - user must read output with avcodec_receive_frame()(once all output is read, the packet should be resent, and the call will not fail with EAGAIN");
}
else if (ret == ffmpeg.AVERROR_EOF)
{
die("AVERROR_EOF: the decoder has been flushed, and no new packets can be sent to it (also returned if more than 1 flush packet is sent");
}
else if (ret == ffmpeg.AVERROR(ffmpeg.EINVAL))
{
die("codec not opened, it is an encoder, or requires flush");
}
else if (ret == ffmpeg.AVERROR(ffmpeg.ENOMEM))
{
die("Failed to add packet to internal queue, or similar other errors: legitimate decoding errors");
}
else
{
die("unknown");
}
return ret;//ffmpeg.avcodec_decode_audio4(fileCodecContext, audioFrameDecoded, &frameFinished, &inPacket);
}
public static int DecodeFlush(AVCodecContext* avctx, AVPacket* avpkt)
{
avpkt->data = null;
avpkt->size = 0;
return ffmpeg.avcodec_send_packet(avctx, avpkt);
}
public static int EncodeNext(AVCodecContext* avctx, AVPacket* avpkt, AVFrame* frame, ref int got_packet_ptr)
{
int ret = 0;
got_packet_ptr = 0;
if ((ret = ffmpeg.avcodec_receive_packet(avctx, avpkt)) == 0)
{
got_packet_ptr = 1;
//0 on success, otherwise negative error code
}
else if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN))
{
//output is not available in the current state - user must try to send input
return Encode(avctx, avpkt, frame, ref got_packet_ptr);
}
else if (ret == ffmpeg.AVERROR_EOF)
{
die("AVERROR_EOF: the encoder has been fully flushed, and there will be no more output packets");
}
else if (ret == ffmpeg.AVERROR(ffmpeg.EINVAL))
{
die("AVERROR(EINVAL) codec not opened, or it is an encoder other errors: legitimate decoding errors");
}
else
{
die("unknown");
}
return ret;//ffmpeg.avcodec_encode_audio2(audioCodecContext, &outPacket, audioFrameConverted, &frameFinished)
}
public static int Encode(AVCodecContext* avctx, AVPacket* avpkt, AVFrame* frame, ref int got_packet_ptr)
{
int ret = 0;
got_packet_ptr = 0;
if ((ret = ffmpeg.avcodec_send_frame(avctx, frame)) == 0)
{
//0 on success, otherwise negative error code
return EncodeNext(avctx, avpkt, frame, ref got_packet_ptr);
}
else if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN))
{
die("input is not accepted in the current state - user must read output with avcodec_receive_packet() (once all output is read, the packet should be resent, and the call will not fail with EAGAIN)");
}
else if (ret == ffmpeg.AVERROR_EOF)
{
die("AVERROR_EOF: the decoder has been flushed, and no new packets can be sent to it (also returned if more than 1 flush packet is sent");
}
else if (ret == ffmpeg.AVERROR(ffmpeg.EINVAL))
{
die("AVERROR(ffmpeg.EINVAL) codec not opened, refcounted_frames not set, it is a decoder, or requires flush");
}
else if (ret == ffmpeg.AVERROR(ffmpeg.ENOMEM))
{
die("AVERROR(ENOMEM) failed to add packet to internal queue, or similar other errors: legitimate decoding errors");
}
else
{
die("unknown");
}
return ret;//ffmpeg.avcodec_encode_audio2(audioCodecContext, &outPacket, audioFrameConverted, &frameFinished)
}
public static int EncodeFlush(AVCodecContext* avctx)
{
return ffmpeg.avcodec_send_frame(avctx, null);
}
public static void Main(string[] argv)
{
//ffmpeg.av_register_all();
if (argv.Length != 2)
{
//fprintf(stderr, "%s <in> <out>\n", argv[0]);
return;
}
// Allocate and init re-usable frames
AVCodecContext* fileCodecContext, audioCodecContext;
AVFormatContext* formatContext, outContext;
AVStream* out_audioStream;
SwrContext* swrContext;
int streamId;
// input file
string file = argv[0];
int res = ffmpeg.avformat_open_input(&formatContext, file, null, null);
if (res != 0)
{
die("avformat_open_input");
}
res = ffmpeg.avformat_find_stream_info(formatContext, null);
if (res < 0)
{
die("avformat_find_stream_info");
}
AVCodec* codec;
res = ffmpeg.av_find_best_stream(formatContext, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &codec, 0);
if (res < 0)
{
return; // die("av_find_best_stream");
}
streamId = res;
fileCodecContext = ffmpeg.avcodec_alloc_context3(codec);
AVCodecParameters* cp = null;
ffmpeg.avcodec_parameters_to_context(fileCodecContext, formatContext->streams[streamId]->codecpar);
res = ffmpeg.avcodec_open2(fileCodecContext, codec, null);
if (res < 0)
{
die("avcodec_open2");
}
in_audioStream = formatContext->streams[streamId];
// output file
//string outfile = Path.Combine(Path.GetTempPath(), $"{Path.GetFileNameWithoutExtension(argv[0])}.pcm");
//AVOutputFormat* fmt = fmt = ffmpeg.av_guess_format("s16le", null, null);
string outfile = argv[1];
AVOutputFormat* fmt = ffmpeg.av_guess_format(null, outfile, null);
if (fmt == null)
{
die("av_guess_format");
}
outContext = ffmpeg.avformat_alloc_context();
outContext->oformat = fmt;
out_audioStream = add_audio_stream(outContext, fmt->audio_codec, in_audioStream->codec->sample_rate);
open_audio(outContext, out_audioStream);
out_audioStream->time_base = in_audioStream->time_base;
res = ffmpeg.avio_open2(&outContext->pb, outfile, ffmpeg.AVIO_FLAG_WRITE, null, null);
if (res < 0)
{
die("url_fopen");
}
ffmpeg.avformat_write_header(outContext, null);
AVCodec* ocodec;
res = ffmpeg.av_find_best_stream(outContext, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &ocodec, 0);
audioCodecContext = ffmpeg.avcodec_alloc_context3(ocodec);
ffmpeg.avcodec_parameters_to_context(audioCodecContext, out_audioStream->codecpar);
res = ffmpeg.avcodec_open2(audioCodecContext, ocodec, null);
if (res < 0)
{
die("avcodec_open2");
}
// resampling
swrContext = ffmpeg.swr_alloc();
ffmpeg.av_opt_set_channel_layout(swrContext, "in_channel_layout", (long)fileCodecContext->channel_layout, 0);
ffmpeg.av_opt_set_channel_layout(swrContext, "out_channel_layout", (long)audioCodecContext->channel_layout, 0);
ffmpeg.av_opt_set_int(swrContext, "in_sample_rate", fileCodecContext->sample_rate, 0);
ffmpeg.av_opt_set_int(swrContext, "out_sample_rate", audioCodecContext->sample_rate, 0);
ffmpeg.av_opt_set_sample_fmt(swrContext, "in_sample_fmt", fileCodecContext->sample_fmt, 0);
ffmpeg.av_opt_set_sample_fmt(swrContext, "out_sample_fmt", audioCodecContext->sample_fmt, 0);
res = ffmpeg.swr_init(swrContext);
if (res < 0)
{
die("swr_init");
}
AVFrame* audioFrameDecoded = ffmpeg.av_frame_alloc();
if (audioFrameDecoded == null)
{
die("Could not allocate audio frame");
}
audioFrameDecoded->format = (int)fileCodecContext->sample_fmt;
audioFrameDecoded->channel_layout = fileCodecContext->channel_layout;
audioFrameDecoded->channels = fileCodecContext->channels;
audioFrameDecoded->sample_rate = fileCodecContext->sample_rate;
AVFrame* audioFrameConverted = ffmpeg.av_frame_alloc();
if (audioFrameConverted == null)
{
die("Could not allocate audio frame");
}
audioFrameConverted->nb_samples = audioCodecContext->frame_size;
audioFrameConverted->format = (int)audioCodecContext->sample_fmt;
audioFrameConverted->channel_layout = audioCodecContext->channel_layout;
audioFrameConverted->channels = audioCodecContext->channels;
audioFrameConverted->sample_rate = audioCodecContext->sample_rate;
if (audioFrameConverted->nb_samples <= 0)
{
audioFrameConverted->nb_samples = 32;
}
AVPacket inPacket;
ffmpeg.av_init_packet(&inPacket);
inPacket.data = null;
inPacket.size = 0;
int frameFinished = 0;
for (; ; )
{
if (ffmpeg.av_read_frame(formatContext, &inPacket) < 0)
{
break;
}
if (inPacket.stream_index == streamId)
{
int len = Decode(fileCodecContext, audioFrameDecoded, ref frameFinished, &inPacket);
if (len == ffmpeg.AVERROR_EOF)
{
break;
}
if (frameFinished != 0)
{
// Convert
byte* convertedData = null;
if (ffmpeg.av_samples_alloc(&convertedData,
null,
audioCodecContext->channels,
audioFrameConverted->nb_samples,
audioCodecContext->sample_fmt, 0) < 0)
{
die("Could not allocate samples");
}
int outSamples = 0;
fixed (byte** tmp = (byte*[])audioFrameDecoded->data)
{
outSamples = ffmpeg.swr_convert(swrContext, null, 0,
//&convertedData,
//audioFrameConverted->nb_samples,
tmp,
audioFrameDecoded->nb_samples);
}
if (outSamples < 0)
{
die("Could not convert");
}
for (; ; )
{
outSamples = ffmpeg.swr_get_out_samples(swrContext, 0);
if ((outSamples < audioCodecContext->frame_size * audioCodecContext->channels) || audioCodecContext->frame_size == 0 && (outSamples < audioFrameConverted->nb_samples * audioCodecContext->channels))
{
break; // see comments, thanks to #dajuric for fixing this
}
outSamples = ffmpeg.swr_convert(swrContext,
&convertedData,
audioFrameConverted->nb_samples, null, 0);
int buffer_size = ffmpeg.av_samples_get_buffer_size(null,
audioCodecContext->channels,
audioFrameConverted->nb_samples,
audioCodecContext->sample_fmt,
0);
if (buffer_size < 0)
{
die("Invalid buffer size");
}
if (ffmpeg.avcodec_fill_audio_frame(audioFrameConverted,
audioCodecContext->channels,
audioCodecContext->sample_fmt,
convertedData,
buffer_size,
0) < 0)
{
die("Could not fill frame");
}
AVPacket outPacket;
ffmpeg.av_init_packet(&outPacket);
outPacket.data = null;
outPacket.size = 0;
if (Encode(audioCodecContext, &outPacket, audioFrameConverted, ref frameFinished) < 0)
{
die("Error encoding audio frame");
}
//outPacket.flags |= ffmpeg.AV_PKT_FLAG_KEY;
outPacket.stream_index = out_audioStream->index;
//outPacket.data = audio_outbuf;
outPacket.dts = audioFrameDecoded->pkt_dts;
outPacket.pts = audioFrameDecoded->pkt_pts;
ffmpeg.av_packet_rescale_ts(&outPacket, in_audioStream->time_base, out_audioStream->time_base);
if (frameFinished != 0)
{
if (ffmpeg.av_interleaved_write_frame(outContext, &outPacket) != 0)
{
die("Error while writing audio frame");
}
ffmpeg.av_packet_unref(&outPacket);
}
}
}
}
}
EncodeFlush(audioCodecContext);
DecodeFlush(fileCodecContext, &inPacket);
ffmpeg.swr_close(swrContext);
ffmpeg.swr_free(&swrContext);
ffmpeg.av_frame_free(&audioFrameConverted);
ffmpeg.av_frame_free(&audioFrameDecoded);
ffmpeg.av_packet_unref(&inPacket);
ffmpeg.av_write_trailer(outContext);
ffmpeg.avio_close(outContext->pb);
ffmpeg.avcodec_close(fileCodecContext);
ffmpeg.avcodec_free_context(&fileCodecContext);
ffmpeg.avformat_close_input(&formatContext);
return;
}
}
}

How can I get the data of an 8-bit wav file

I'm making a demo about sound in Windows Forms, and I created 3 classes for reading the data of a wave file. The code is below:
RiffBlock
public class RiffBlock
{
private byte[] riffID;
private uint riffSize;
private byte[] riffFormat;
public byte[] RiffID
{
get { return riffID; }
}
public uint RiffSize
{
get { return (riffSize); }
}
public byte[] RiffFormat
{
get { return riffFormat; }
}
public RiffBlock()
{
riffID = new byte[4];
riffFormat = new byte[4];
}
public void ReadRiff(FileStream inFS)
{
inFS.Read(riffID, 0, 4);
BinaryReader binRead = new BinaryReader(inFS);
riffSize = binRead.ReadUInt32();
inFS.Read(riffFormat, 0, 4);
}
}
FormatBlock
public class FormatBlock
{
private byte[] fmtID;
private uint fmtSize;
private ushort fmtTag;
private ushort fmtChannels;
private uint fmtSamplesPerSec;
private uint fmtAverageBytesPerSec;
private ushort fmtBlockAlign;
private ushort fmtBitsPerSample;
public byte[] FmtID
{
get { return fmtID; }
}
public uint FmtSize
{
get { return fmtSize; }
}
public ushort FmtTag
{
get { return fmtTag; }
}
public ushort Channels
{
get { return fmtChannels; }
}
public uint SamplesPerSec
{
get { return fmtSamplesPerSec; }
}
public uint AverageBytesPerSec
{
get { return fmtAverageBytesPerSec; }
}
public ushort BlockAlign
{
get { return fmtBlockAlign; }
}
public ushort BitsPerSample
{
get { return fmtBitsPerSample; }
}
public FormatBlock()
{
fmtID = new byte[4];
}
public void ReadFmt(FileStream inFS)
{
inFS.Read(fmtID, 0, 4);
BinaryReader binRead = new BinaryReader(inFS);
fmtSize = binRead.ReadUInt32();
fmtTag = binRead.ReadUInt16();
fmtChannels = binRead.ReadUInt16();
fmtSamplesPerSec = binRead.ReadUInt32();
fmtAverageBytesPerSec = binRead.ReadUInt32();
fmtBlockAlign = binRead.ReadUInt16();
fmtBitsPerSample = binRead.ReadUInt16();
// This accounts for the variable format header size
// 12 bytes of Riff Header, 4 bytes for FormatId, 4 bytes for FormatSize & the Actual size of the Format Header
inFS.Seek(fmtSize + 20, System.IO.SeekOrigin.Begin);
}
}
DataBlock
public class DataBlock
{
private byte[] dataID;
private uint dataSize;
private Int16[] data;
private int dataNumSamples;
public byte[] DataID
{
get { return dataID; }
}
public uint DataSize
{
get { return dataSize; }
}
public Int16 this[int pos]
{
get { return data[pos]; }
}
public int NumSamples
{
get { return dataNumSamples; }
}
public DataBlock()
{
dataID = new byte[4];
}
public void ReadData(FileStream inFS)
{
inFS.Read(dataID, 0, 4);
BinaryReader binRead = new BinaryReader(inFS);
dataSize = binRead.ReadUInt32();
data = new Int16[dataSize];
inFS.Seek(40, SeekOrigin.Begin);
dataNumSamples = (int)(dataSize / 2);
for (int i = 0; i < dataNumSamples; i++)
{
data[i] = binRead.ReadInt16();
}
}
}
It works OK with a 16-bit wave file, but when I choose an 8-bit wav file (or another format), the result of the command dataSize = binRead.ReadUInt32(); is only 4, although the file size is big.
How can I get the data of 8-bit, 24-bit, ... wav files?
Any suggestions are appreciated, thank you very much.
Your reading methodology is flawed. The length is correct, but for an 8-bits-per-sample file you should be reading bytes, not words; as it stands the data will be incorrect and the value returned by the NumSamples property will be wrong.
In my case, the sub-chunk size is 1160, the number of channels is 1 (mono) and the bits per sample is 8 (byte). You will need to decode the bits per sample and adjust your reading accordingly. For the WAV file I used, your program allocated a data array of the correct length but as 16-bit values, divided the data length by 2 and called this the number of samples (wrong, it should be 1160), and then proceeded to read 580 word values from the stream.
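As a rough sketch (assuming fmt is the FormatBlock you read earlier, and reusing your binRead and dataSize; this is not a drop-in replacement for your DataBlock class), the read loop could branch on the bits per sample:
// Sketch: read the data chunk according to the bits per sample.
// 8-bit WAV samples are unsigned bytes centred on 128; 16-bit samples are signed shorts.
int bytesPerSample = fmt.BitsPerSample / 8;
int numSamples = (int)(dataSize / (uint)bytesPerSample);
short[] samples = new short[numSamples];
for (int i = 0; i < numSamples; i++)
{
if (fmt.BitsPerSample == 8)
samples[i] = (short)((binRead.ReadByte() - 128) << 8); // centre and scale up to the 16-bit range
else
samples[i] = binRead.ReadInt16();
}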
Edit: My ancient code will not cut it in the modern age (I seem to recall having to modify it some years ago to cope with at least one additional chunk type but the details escape me).
This is what you get; anything more and your question should read "Could someone write me a program to load WAV files". As it is, we are way beyond the original question, and it is time for you to knuckle down and make it work how you need it to :-)
References:
http://www.neurophys.wisc.edu/auditory/riff-format.txt
http://www-mmsp.ece.mcgill.ca/Documents/AudioFormats/WAVE/WAVE.html
http://www.shikadi.net/moddingwiki/Resource_Interchange_File_Format_(RIFF)
http://soundfile.sapp.org/doc/WaveFormat/
All are very useful.
///<summary>
///Reads up to 16 bit WAV files
///</summary>
///<remarks> Things have really changed in the last 15 years</remarks>
public class RiffLoader
{
public enum RiffStatus { Unknown = 0, OK, FileError, FormatError, UnsupportedFormat };
LinkedList<Chunk> chunks = new LinkedList<Chunk>();
RiffStatus status = RiffStatus.Unknown;
List<String> errorMessages = new List<string>();
String source;
public String SourceName { get { return source; } }
public RiffStatus Status { get { return status; } }
public String[] Messages { get { return errorMessages.ToArray(); } }
enum chunkType { Unknown = 0, NoMore, Riff, Fmt, Fact, Data, Error = -1 };
static Int32 scan32bits(byte[] source, int offset = 0)
{
return source[offset] | source[offset + 1] << 8 | source[offset + 2] << 16 | source[offset + 3] << 24;
}
static Int32 scan16bits(byte[] source, int offset = 0)
{
return source[offset] | source[offset + 1] << 8;
}
static Int32 scan8bits(byte[] source, int offset = 0)
{
return source[offset];
}
abstract class Chunk
{
public chunkType Ident = chunkType.Unknown;
public int ByteCount = 0;
}
class RiffChunk : Chunk
{
public RiffChunk(int count)
{
this.Ident = chunkType.Riff;
this.ByteCount = count;
}
}
class FmtChunk : Chunk
{
int formatCode = 0;
int channels = 0;
int samplesPerSec = 0;
int avgBytesPerSec = 0;
int blockAlign = 0;
int bitsPerSample = 0;
int significantBits = 0;
public int Format { get { return formatCode; } }
public int Channels { get { return channels; } }
public int BlockAlign { get { return blockAlign; } }
public int BytesPerSample { get { return bitsPerSample / 8 + ((bitsPerSample % 8) > 0 ? 1 : 0); } }
public int BitsPerSample
{
get
{
if (significantBits > 0)
return significantBits;
return bitsPerSample;
}
}
public FmtChunk(byte[] buffer) : base()
{
int size = buffer.Length;
// if the length is 18 then buffer 16,17 should be 00 00 (I don't bother checking)
if (size != 16 && size != 18 && size != 40)
return;
formatCode = scan16bits(buffer, 0);
channels = scan16bits(buffer, 2);
samplesPerSec = scan32bits(buffer, 4);
avgBytesPerSec = scan32bits(buffer, 8);
blockAlign = scan16bits(buffer, 12);
bitsPerSample = scan16bits(buffer, 14);
if (formatCode == 0xfffe) // EXTENSIBLE
{
if (size != 40)
return;
significantBits = scan16bits(buffer, 18);
// skipping speaker map
formatCode = scan16bits(buffer, 24); // first two bytes of the GUID
// the rest of the GUID is fixed, decode it and check it if you wish
}
this.Ident = chunkType.Fmt;
this.ByteCount = size;
}
}
class DataChunk : Chunk
{
byte[] samples = null;
///<summary>
///Create a data chunk
///<para>
///The supplied buffer must be correctly sized with zero offset and must be purely for this class
///</para>
///<summary>
///<param name="buffer">source array</param>
public DataChunk(byte[] buffer)
{
this.Ident = chunkType.Data;
this.ByteCount = buffer.Length;
samples = buffer;
}
public enum SampleStatus { OK, Duff }
public class Samples
{
public SampleStatus Status = SampleStatus.Duff;
public List<int[]> Channels = new List<int[]>();
#if false // debugger helper method
/*
** Change #if false to #if true to include this
** Break at end of GetSamples on "return retval"
** open immediate window and type retval.DumpLast(16)
** look in output window for dump of last 16 entries
*/
public int DumpLast(int count)
{
for (int i = Channels[0].Length - count; i < Channels[0].Length; i++)
Console.WriteLine(String.Format("{0:X4} {1:X4},{2:X4}", i, Channels[0][i], Channels[1][i]));
return 0;
}
#endif
}
/*
** Return the decoded samples
*/
public Samples GetSamples(FmtChunk format)
{
Samples retval = new Samples();
int samplesPerChannel = this.ByteCount / (format.BytesPerSample * format.Channels);
int mask = 0, sign=0;
int [][] samples = new int [format.Channels][];
for (int c = 0; c < format.Channels; c++)
samples[c] = new int[samplesPerChannel];
if (format.BitsPerSample >= 8 && format.BitsPerSample <= 16) // 24+ is left as an exercise
{
sign = (int)Math.Floor(Math.Pow(2, format.BitsPerSample - 1));
mask = (int)Math.Floor(Math.Pow(2, format.BitsPerSample)) - 1;
int offset = 0, index = 0;
int s = 0;
while (index < samplesPerChannel)
{
for (int c = 0; c < format.Channels; c++)
{
switch (format.BytesPerSample)
{
case 1:
s = scan8bits(this.samples, offset) & mask;
break;
case 2:
s = scan16bits(this.samples, offset) & mask;
break;
}
// sign extend the data to Int32
samples[c][index] = s | ((s & sign) != 0 ? ~mask : 0);
offset += format.BytesPerSample;
}
++index;
}
retval.Channels = new List<int[]>(samples);
retval.Status = SampleStatus.OK;
}
return retval;
}
}
class FactChunk : Chunk
{
int samplesPerChannel;
public int SamplesPerChannel { get { return samplesPerChannel; } }
public FactChunk(byte[] buffer)
{
this.Ident = chunkType.Fact;
this.ByteCount = buffer.Length;
if (buffer.Length >= 4)
samplesPerChannel = scan32bits(buffer);
}
}
class DummyChunk : Chunk
{
public DummyChunk(int size, chunkType type = chunkType.Unknown)
{
this.Ident = type;
this.ByteCount = size;
}
}
public RiffLoader(String fileName)
{
source = fileName;
try
{
using (FileStream fs = new FileStream(fileName, FileMode.Open, FileAccess.Read))
using (BinaryReader reader = new BinaryReader(fs))
{
Chunk c = getChunk(fs, reader);
if (c.Ident != chunkType.Riff)
{
status = RiffStatus.FileError;
errorMessages.Add(String.Format("Error loading \"{0}\": No valid header"));
}
chunks.AddLast(c);
c = getChunk(fs, reader);
if (c.Ident != chunkType.Fmt)
{
status = RiffStatus.FileError;
errorMessages.Add(String.Format("Error loading \"{0}\": No format chunk"));
}
chunks.AddLast(c);
/*
** From now on we just keep scanning to the end of the file
*/
while (fs.Position < fs.Length)
{
c = getChunk(fs, reader);
switch (c.Ident)
{
case chunkType.Fact:
case chunkType.Data:
chunks.AddLast(c);
break;
case chunkType.Unknown:
break; // skip it - don't care what it is
}
}
FmtChunk format = null;
foreach (var chunk in chunks)
{
switch(chunk.Ident)
{
case chunkType.Fmt:
format = chunk as FmtChunk;
break;
case chunkType.Data:
if (format != null)
{
DataChunk dc = chunk as DataChunk;
var x = dc.GetSamples(format);
}
break;
}
}
}
}
catch (Exception e)
{
status = RiffStatus.FileError;
errorMessages.Add(String.Format("Error loading \"{0}\": {1}", e.Message));
}
}
/*
** Get a chunk of data from the file - knows nothing of the internal format of the chunk.
*/
Chunk getChunk(FileStream stream, BinaryReader reader)
{
byte[] buffer;
int size;
buffer = reader.ReadBytes(8);
if (buffer.Length == 8)
{
String prefix = new String(Encoding.ASCII.GetChars(buffer, 0, 4));
size = scan32bits(buffer, 4);
if (size + stream.Position <= stream.Length) // skip if there isn't enough data
{
if (String.Compare(prefix, "RIFF") == 0)
{
/*
** only "WAVE" type is acceptable
**
** Don't read size bytes or the entire file will end up in the RIFF chunk
*/
if (size >= 4)
{
buffer = reader.ReadBytes(4);
String ident = new String(Encoding.ASCII.GetChars(buffer, 0, 4));
if (String.CompareOrdinal(ident, "WAVE") == 0)
return new RiffChunk(size - 4);
}
}
else if (String.Compare(prefix, "fmt ") == 0)
{
if (size >= 16)
{
buffer = reader.ReadBytes(size);
if (buffer.Length == size)
return new FmtChunk(buffer);
}
}
else if (String.Compare(prefix, "fact") == 0)
{
if (size >= 4)
{
buffer = reader.ReadBytes(4);
if (buffer.Length == size)
return new FactChunk(buffer);
}
}
else if (String.Compare(prefix, "data") == 0)
{
// assume that there has to be data
if (size > 0)
{
buffer = reader.ReadBytes(size);
if ((size & 1) != 0) // odd length?
{
if (stream.Position < stream.Length)
reader.ReadByte();
else
size = -1; // force an error - there should be a pad byte
}
if (buffer.Length == size)
return new DataChunk(buffer);
}
}
else
{
/*
** there are a number of weird and wonderful block types - assume there has to be data
*/
if (size > 0)
{
buffer = reader.ReadBytes(size);
if ((size & 1) != 0) // odd length?
{
if (stream.Position < stream.Length)
reader.ReadByte();
else
size = -1; // force an error - there should be a pad byte
}
if (buffer.Length == size)
{
DummyChunk skip = new DummyChunk(size);
return skip;
}
}
}
}
}
return new DummyChunk(0, chunkType.Error);
}
}
You will need to add properties as required and code to navigate the returned linked list. Particular properties you may need are the sample rates in the format block, assuming you are going to process the data in some way.
Adding 24 to 32 bits is simple; if you need to go beyond 32 bits, you will have to switch to Int64s.
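For example, 24-bit support would only need another scan helper and an extra case in DataChunk.GetSamples, something like the untested sketch below (scan24bits is a name I made up to match the existing helpers):
// Little-endian, 3 bytes per sample; the existing mask/sign logic handles the sign extension.
static Int32 scan24bits(byte[] source, int offset = 0)
{
return source[offset] | source[offset + 1] << 8 | source[offset + 2] << 16;
}
// ... and in GetSamples, next to case 1 and case 2:
// case 3:
//     s = scan24bits(this.samples, offset) & mask;
//     break;
// (the range check "format.BitsPerSample <= 16" would become "<= 24")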
I have tested it with some good samples from http://www-mmsp.ece.mcgill.ca/Documents/AudioFormats/WAVE/Samples.html, as well as your 8 bit file, and it decodes OK and seems to have the correct data.
You should be more than capable of making it work now, good luck.
Edit (again, like the proverbial dog...):
Of course, I should have sign extended the value, so DataChunk.GetSamples() now does that. Looking at the Codeproject files (it isn't the greatest code by the way, but the guy does say that he is only just learning C#, so fair play for tackling a graphical user control) it is obvious that the data is signed. It's a shame that he didn't standardise his source to be an array of Int32's, then it wouldn't matter how many bits the WAV was encoded in.
You could use the NAudio library:
https://naudio.codeplex.com/ (take a look at their website, there are quite a lot of tutorials).
Hope this helps :).

Capture screenshot of fullscreen DX11 program using SharpDX and EasyHook

Before anybody mentions it, I referred to this link to find out how I needed to copy the backbuffer to a bitmap.
Current situation
I am injected to the target process
Target process' FeatureLevel = Level_11_0
Target SwapChain is being made with DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH flag.
SwapChain::Present function is hooked.
The screenshot turns out black and the target process crashes; without the screenshot, the process runs fine.
Desired situation
Take the screenshot properly and let the target process continue its normal execution.
Code
NOTE: The Hook class is the same as in the link. I only added an UnmodifiableHook version of it, which does what its name says. I left out all the unimportant bits.
TestSwapChainHook.cs
using System;
using System.Runtime.InteropServices;
namespace Test
{
public sealed class TestSwapChainHook : IDisposable
{
private enum IDXGISwapChainVirtualTable
{
QueryInterface = 0,
AddRef = 1,
Release = 2,
SetPrivateData = 3,
SetPrivateDataInterface = 4,
GetPrivateData = 5,
GetParent = 6,
GetDevice = 7,
Present = 8,
GetBuffer = 9,
SetFullscreenState = 10,
GetFullscreenState = 11,
GetDesc = 12,
ResizeBuffers = 13,
ResizeTarget = 14,
GetContainingOutput = 15,
GetFrameStatistics = 16,
GetLastPresentCount = 17,
}
public static readonly int VIRTUAL_METHOD_COUNT_LEVEL_DEFAULT = 18;
private static IntPtr[] SWAP_CHAIN_VIRTUAL_TABLE_ADDRESSES;
[UnmanagedFunctionPointer(CallingConvention.StdCall, CharSet = CharSet.Unicode, SetLastError = true)]
public delegate int DXGISwapChainPresentDelegate(IntPtr thisPtr, uint syncInterval, SharpDX.DXGI.PresentFlags flags);
public delegate int DXGISwapChainPresentHookDelegate(UnmodifiableHook<DXGISwapChainPresentDelegate> hook, IntPtr thisPtr, uint syncInterval, SharpDX.DXGI.PresentFlags flags);
private DXGISwapChainPresentHookDelegate _present;
private Hook<DXGISwapChainPresentDelegate> presentHook;
static TestSwapChainHook()
{
SharpDX.DXGI.Rational rational = new SharpDX.DXGI.Rational(60, 1);
SharpDX.DXGI.ModeDescription modeDescription = new SharpDX.DXGI.ModeDescription(100, 100, rational, SharpDX.DXGI.Format.R8G8B8A8_UNorm);
SharpDX.DXGI.SampleDescription sampleDescription = new SharpDX.DXGI.SampleDescription(1, 0);
using (SharpDX.Windows.RenderForm renderForm = new SharpDX.Windows.RenderForm())
{
SharpDX.DXGI.SwapChainDescription swapChainDescription = new SharpDX.DXGI.SwapChainDescription();
swapChainDescription.BufferCount = 1;
swapChainDescription.Flags = SharpDX.DXGI.SwapChainFlags.None;
swapChainDescription.IsWindowed = true;
swapChainDescription.ModeDescription = modeDescription;
swapChainDescription.OutputHandle = renderForm.Handle;
swapChainDescription.SampleDescription = sampleDescription;
swapChainDescription.SwapEffect = SharpDX.DXGI.SwapEffect.Discard;
swapChainDescription.Usage = SharpDX.DXGI.Usage.RenderTargetOutput;
SharpDX.Direct3D11.Device device = null;
SharpDX.DXGI.SwapChain swapChain = null;
SharpDX.Direct3D11.Device.CreateWithSwapChain(SharpDX.Direct3D.DriverType.Hardware, SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport, swapChainDescription, out device, out swapChain);
try
{
IntPtr swapChainVirtualTable = Marshal.ReadIntPtr(swapChain.NativePointer);
SWAP_CHAIN_VIRTUAL_TABLE_ADDRESSES = new IntPtr[VIRTUAL_METHOD_COUNT_LEVEL_DEFAULT];
for (int x = 0; x < VIRTUAL_METHOD_COUNT_LEVEL_DEFAULT; x++)
{
SWAP_CHAIN_VIRTUAL_TABLE_ADDRESSES[x] = Marshal.ReadIntPtr(swapChainVirtualTable, x * IntPtr.Size);
}
device.Dispose();
swapChain.Dispose();
}
catch (Exception)
{
if (device != null)
{
device.Dispose();
}
if (swapChain != null)
{
swapChain.Dispose();
}
throw;
}
}
}
public TestSwapChainHook()
{
this._present = null;
this.presentHook = new Hook<DXGISwapChainPresentDelegate>(
SWAP_CHAIN_VIRTUAL_TABLE_ADDRESSES[(int)IDXGISwapChainVirtualTable.Present],
new DXGISwapChainPresentDelegate(hookPresent),
this);
}
public void activate()
{
this.presentHook.activate();
}
public void deactivate()
{
this.presentHook.deactivate();
}
private int hookPresent(IntPtr thisPtr, uint syncInterval, SharpDX.DXGI.PresentFlags flags)
{
lock (this.presentHook)
{
if (this._present == null)
{
return this.presentHook.original(thisPtr, syncInterval, flags);
}
else
{
return this._present(new UnmodifiableHook<DXGISwapChainPresentDelegate>(this.presentHook), thisPtr, syncInterval, flags);
}
}
}
public DXGISwapChainPresentHookDelegate present
{
get
{
lock (this.presentHook)
{
return this._present;
}
}
set
{
lock (this.presentHook)
{
this._present = value;
}
}
}
}
}
Using code
initialization
private TestSwapChainHook swapChainHook;
private bool capture = false;
private object captureLock = new object();
this.swapChainHook = new TestSwapChainHook();
this.swapChainHook.present = presentHook;
this.swapChainHook.activate();
EDIT
I used a different method to capture a screenshot described in this link. However my screenshot turns out like this:
Now this seems to be a problem with my conversion settings or whatever, but I'm unable to find out what exactly I need to do to fix it. I know that the surface I'm converting to a bitmap uses the DXGI_FORMAT_R10G10B10A2_UNORM format (32 bits, 10 bits per color and 2 for alpha, I think?). But I'm not sure how this even works in the for loops (skipping bytes and stuff); I just plain copy-pasted it.
new hook function
private int presentHook(UnmodifiableHook<IDXGISwapChainHook.DXGISwapChainPresentDelegate> hook, IntPtr thisPtr, uint syncInterval, SharpDX.DXGI.PresentFlags flags)
{
try
{
lock (this.captureLock)
{
if (this.capture)
{
SharpDX.DXGI.SwapChain swapChain = (SharpDX.DXGI.SwapChain)thisPtr;
using (SharpDX.Direct3D11.Texture2D backBuffer = swapChain.GetBackBuffer<SharpDX.Direct3D11.Texture2D>(0))
{
SharpDX.Direct3D11.Texture2DDescription texture2DDescription = backBuffer.Description;
texture2DDescription.CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.Read;
texture2DDescription.Usage = SharpDX.Direct3D11.ResourceUsage.Staging;
texture2DDescription.OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.None;
texture2DDescription.BindFlags = SharpDX.Direct3D11.BindFlags.None;
using (SharpDX.Direct3D11.Texture2D texture = new SharpDX.Direct3D11.Texture2D(backBuffer.Device, texture2DDescription))
{
//DXGI_FORMAT_R10G10B10A2_UNORM
backBuffer.Device.ImmediateContext.CopyResource(backBuffer, texture);
using (SharpDX.DXGI.Surface surface = texture.QueryInterface<SharpDX.DXGI.Surface>())
{
SharpDX.DataStream dataStream;
SharpDX.DataRectangle map = surface.Map(SharpDX.DXGI.MapFlags.Read, out dataStream);
try
{
byte[] pixelData = new byte[surface.Description.Width * surface.Description.Height * 4];
int lines = (int)(dataStream.Length / map.Pitch);
int dataCounter = 0;
int actualWidth = surface.Description.Width * 4;
for (int y = 0; y < lines; y++)
{
for (int x = 0; x < map.Pitch; x++)
{
if (x < actualWidth)
{
pixelData[dataCounter++] = dataStream.Read<byte>();
}
else
{
dataStream.Read<byte>();
}
}
}
GCHandle handle = GCHandle.Alloc(pixelData, GCHandleType.Pinned);
try
{
using (Bitmap bitmap = new Bitmap(surface.Description.Width, surface.Description.Height, map.Pitch, PixelFormat.Format32bppArgb, handle.AddrOfPinnedObject()))
{
bitmap.Save(@"C:\Users\SOMEUSERNAME\Desktop\test.bmp");
}
}
finally
{
if (handle.IsAllocated)
{
handle.Free();
}
}
}
finally
{
surface.Unmap();
dataStream.Dispose();
}
}
}
}
this.capture = false;
}
}
}
catch(Exception ex)
{
MessageBox.Show(ex.ToString());
}
return hook.original(thisPtr, syncInterval, flags);
}
Answer
It turns out the DXGI_FORMAT_R10G10B10A2_UNORM format has this bit layout (reading each pixel as a 32-bit value):
A=alpha
B=blue
G=green
R=red
AABBBBBB BBBBGGGG GGGGGGRR RRRRRRRR
And Format32bppArgb is in this byte order:
BGRA
So the final loop code would be:
while (pixelIndex < pixelData.Length)
{
uint currentPixel = dataStream.Read<uint>();
uint r = (currentPixel & 0x3FF);
uint g = (currentPixel & 0xFFC00) >> 10;
uint b = (currentPixel & 0x3FF00000) >> 20;
uint a = (currentPixel & 0xC0000000) >> 30;
pixelData[pixelIndex++] = (byte)(b >> 2);
pixelData[pixelIndex++] = (byte)(g >> 2);
pixelData[pixelIndex++] = (byte)(r >> 2);
pixelData[pixelIndex++] = (byte)(a << 6);
while ((pixelIndex % map.Pitch) >= actualWidth)
{
dataStream.Read<byte>();
pixelIndex++;
}
}
That screenshot does look like R10G10B10A2 is getting stuffed into R8G8B8A8. I haven't tested your code but we should have this bit layout
xxxxxxxx yyyyyyyy zzzzzzzz wwwwwwww
RRRRRRRR RRGGGGGG GGGGBBBB BBBBBBAA
and you can extract them as follows
byte x = data[ptr++];
byte y = data[ptr++];
byte z = data[ptr++];
byte w = data[ptr++];
int r = x << 2 | y >> 6;
int g = (y & 0x3F) << 4 | z >> 4;
int b = (z & 0xF) << 6 | w >> 2;
int a = w & 0x3;
where r, g, b now have 10 bit resolution. If you want to scale them back to bytes you can do that with (byte)(r >> 2).
Update
This would replace your double for loop. I have no way of testing this so I don't want to push it further, but I believe the idea is correct. The last check should skip the padding bytes in each row.
while(dataCounter < pixelData.Length)
{
byte x = dataStream.Read<byte>();
byte y = dataStream.Read<byte>();
byte z = dataStream.Read<byte>();
byte w = dataStream.Read<byte>();
int r = x << 2 | y >> 6;
int g = (y & 0x3F) << 4 | z >> 4;
int b = (z & 0xF) << 6 | w >> 2;
int a = w & 0x3;
pixelData[dataCounter++] = (byte)(r >> 2);
pixelData[dataCounter++] = (byte)(g >> 2);
pixelData[dataCounter++] = (byte)(b >> 2);
pixelData[dataCounter++] = (byte)(a << 6);
while((dataCounter % map.Pitch) >= actualWidth)
{
dataStream.Read<byte>();
dataCounter++;
}
}

C# EasyHook Winsock WS2_32.dll connect hook, SOCKS5

I'm trying to hook the Winsock connect function and route the TCP connection through a SOCKS5 proxy with auth.
This works if the socket is a blocking socket, but while using Firefox (nonblocking sockets) I get a lot of 10035 and 10022 Winsock errors.
How can I determine whether it's a nonblocking or a blocking socket?
I would really appreciate any hints or ideas on how to hook the Winsock connect function and route TCP traffic through a SOCKS5 server.
I can put the demo application on GitHub if anybody wants to test it. (Works with any version of Firefox.)
Edit1: https://github.com/duketwo/WinsockConnectHookSocks5/
( You have to edit the proxy information in WSockConnectHook/HookManager.cs and the path of firefox in Injector/MainForm.cs )
Edit2: It's EasyHook which is causing the trouble; anything after the original function call doesn't work properly.
Edit3: It seems I got it working, with many flaws; in fact it is necessary to differentiate between nonblocking and blocking sockets. Any ideas how to achieve this?
Edit4: Windows doesn't offer any method to retrieve the blocking attribute of a socket, so I might have to hook the ioctlsocket function to keep track of the blocking status of the sockets.
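A rough sketch of that ioctlsocket idea, following the same LocalHook pattern as the connect hook below (IoctlSocketDetour and nonBlockingSockets are hypothetical names, and it is untested; a real version would also need locking around the set):
// Sketch: hook ioctlsocket(FIONBIO) and remember which sockets were switched to non-blocking.
[UnmanagedFunctionPointer(CallingConvention.StdCall, SetLastError = true)]
delegate int IoctlSocketDelegate(IntPtr s, uint cmd, ref uint argp);
[DllImport("ws2_32.dll", SetLastError = true)]
static extern int ioctlsocket(IntPtr s, uint cmd, ref uint argp);
const uint FIONBIO = 0x8004667E;
static readonly HashSet<IntPtr> nonBlockingSockets = new HashSet<IntPtr>();
static int IoctlSocketDetour(IntPtr s, uint cmd, ref uint argp)
{
if (cmd == FIONBIO)
{
if (argp != 0) nonBlockingSockets.Add(s); // socket switched to non-blocking
else nonBlockingSockets.Remove(s);        // switched back to blocking
}
return ioctlsocket(s, cmd, ref argp); // call the real function
}
// Installed the same way as the connect hook, e.g.:
// LocalHook.Create(LocalHook.GetProcAddress("ws2_32.dll", "ioctlsocket"),
//                  new IoctlSocketDelegate(IoctlSocketDetour), null);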
Thanks
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using EasyHook;
using System.IO;
using System.Windows.Forms;
namespace WSockConnectHook
{
public class WinSockConnectController : IDisposable, IHook
{
[UnmanagedFunctionPointer(CallingConvention.StdCall, CharSet = CharSet.Ansi, SetLastError = true)]
private delegate int WinsockConnectDelegate(IntPtr s, IntPtr addr, int addrsize);
[DllImport("WS2_32.dll", SetLastError = true)]
public static extern int connect(IntPtr s, IntPtr addr, int addrsize);
[StructLayout(LayoutKind.Sequential, Size = 16)]
public struct sockaddr_in
{
public const int Size = 16;
public short sin_family;
public ushort sin_port;
public struct in_addr
{
public uint S_addr;
public struct _S_un_b
{
public byte s_b1, s_b2, s_b3, s_b4;
}
public _S_un_b S_un_b;
public struct _S_un_w
{
public ushort s_w1, s_w2;
}
public _S_un_w S_un_w;
}
public in_addr sin_addr;
}
[DllImport("ws2_32.dll", CharSet = CharSet.Auto, SetLastError = true)]
static extern int WSAGetLastError();
[DllImport("ws2_32.dll", CharSet = CharSet.Auto, SetLastError = true)]
static extern void WSASetLastError(int set);
[DllImport("Ws2_32.dll", CharSet = CharSet.Ansi)]
public static extern uint inet_addr(string cp);
[DllImport("Ws2_32.dll")]
public static extern ushort htons(ushort hostshort);
[DllImport("ws2_32.dll", CharSet = CharSet.Unicode, SetLastError = true)]
public static extern IntPtr socket(short af, short socket_type, int protocol);
[DllImport("Ws2_32.dll")]
public static extern int send(IntPtr s, IntPtr buf, int len, int flags);
[DllImport("Ws2_32.dll")]
public static extern int recv(IntPtr s, IntPtr buf, int len, int flags);
[DllImport("ws2_32.dll", CharSet = CharSet.Unicode, SetLastError = true)]
public static extern int closesocket(IntPtr s);
[DllImport("Ws2_32.dll")]
public static extern ushort ntohs(ushort netshort);
[DllImport("kernel32.dll", SetLastError = true)]
private static extern void SetLastError(int errorCode);
private string _name;
private LocalHook _hook;
public bool Error { get; set; }
public string Name { get; set; }
private string proxyIp, proxyPort, proxyUser, proxyPass;
public WinSockConnectController(IntPtr address, string proxyIp, string proxyPort, string proxyUser, string proxyPass)
{
this.Name = typeof(WinSockConnectController).Name;
this.proxyIp = proxyIp;
this.proxyPort = proxyPort;
this.proxyUser = proxyUser;
this.proxyPass = proxyPass;
try
{
_name = string.Format("WinsockHook_{0:X}", address.ToInt32());
_hook = LocalHook.Create(address, new WinsockConnectDelegate(WinsockConnectDetour), this);
_hook.ThreadACL.SetExclusiveACL(new Int32[] { 1 });
}
catch (Exception)
{
this.Error = true;
}
}
private object wSockLock = new object();
private int WinsockConnectDetour(IntPtr s, IntPtr addr, int addrsize)
{
lock (wSockLock)
{
// retrieve remote ip
sockaddr_in structure = (sockaddr_in)Marshal.PtrToStructure(addr, typeof(sockaddr_in));
string remoteIp = new System.Net.IPAddress(structure.sin_addr.S_addr).ToString();
ushort remotePort = ntohs(structure.sin_port);
HookManager.Log("Ip: " + remoteIp + " Port: " + remotePort.ToString() + " Addrsize: " + addrsize);
if (!proxyIp.Equals(""))
//if (!proxyIp.Equals(""))
{
// connect to socks5 server
SetAddr(s, addr, proxyIp, proxyPort);
var result = Connect(s, addr, addrsize);
if (result == -1)
return -1;
// send socks 5 request
IntPtr socksProtocolRequest = SetUpSocks5Request();
result = send(s, socksProtocolRequest, 4, 0);
if (result == -1)
return -1;
// retrieve server response
var response = Recieve(s, 2);
if (response == IntPtr.Zero)
return -1;
byte[] recvBytes = new byte[2] { Marshal.ReadByte(response), Marshal.ReadByte(response, 1) };
if (recvBytes[1] == 255)
{
HookManager.Log("No authentication method was accepted by the proxy server");
return -1;
}
if (recvBytes[0] != 5)
{
HookManager.Log("No SOCKS5 proxy");
return -1;
}
// if the server asks for username/password auth, send the authenticate request
if (recvBytes[1] == 2)
{
int length = 0;
var authenticateRequest = SetUpAuthenticateRequest(proxyUser, proxyPass, out length);
result = Send(s, authenticateRequest, length);
response = Recieve(s, 2);
if (response == IntPtr.Zero)
return -1;
recvBytes = new byte[2] { Marshal.ReadByte(response), Marshal.ReadByte(response, 1) };
if (recvBytes[1] != 0)
{
HookManager.Log("Proxy: incorrect username/password");
return -1;
}
}
// request bind with server
var bindRequest = SetUpBindWithRemoteHost(remoteIp, remotePort);
result = Send(s, bindRequest, 10);
if (result == -1)
return -1;
// response
response = Recieve(s, 10);
if (response == IntPtr.Zero)
return -1;
if (!VerifyBindResponse(response))
return -1;
// success
WSASetLastError(0);
SetLastError(0);
// clean memory
foreach (var ptr in allocatedMemory)
Marshal.FreeHGlobal(ptr);
allocatedMemory.Clear();
return 0;
}
else
{
var result = connect(s, addr, addrsize);
return result;
}
}
}
private int Connect(IntPtr socket, IntPtr addr, int addrsize)
{
var result = connect(socket, addr, addrsize);
while (result == -1)
{
var errorcode = WSAGetLastError();
HookManager.Log("Error: " + errorcode);
if (errorcode == 10056)
break;
if (errorcode == 10037)
break;
if (errorcode != 10035 && errorcode != 10037)
return -1;
//flag = 1;
result = connect(socket, addr, addrsize);
}
return result;
}
private int Send(IntPtr socket, IntPtr buf, int len)
{
var result = send(socket, buf, len, 0);
while (result == -1)
{
var errorcode = WSAGetLastError();
HookManager.Log("Error: " + errorcode);
if (errorcode == 10056)
break;
if (errorcode == 10037)
break;
if (errorcode != 10035 && errorcode != 10037)
return -1;
result = send(socket, buf, 4, 0);
}
return result;
}
private List<IntPtr> allocatedMemory = new List<IntPtr>();
private IntPtr Recieve(IntPtr socket, int len)
{
var buffer = Marshal.AllocHGlobal(len);
allocatedMemory.Add(buffer);
var result = recv(socket, buffer, len, 0);
if (result == -1)
{
HookManager.Log("Error2: " + WSAGetLastError());
return IntPtr.Zero;
}
return buffer;
}
private IntPtr RecieveAuth(IntPtr socket, int len)
{
var buffer = Marshal.AllocHGlobal(len);
allocatedMemory.Add(buffer);
var result = recv(socket, buffer, len, 0);
if (result == -1)
{
HookManager.Log("Error3: " + WSAGetLastError());
return IntPtr.Zero;
}
if (result == 0)
return buffer;
if (result != 2)
{
HookManager.Log("Proxy: Bad response from server");
return IntPtr.Zero;
}
return buffer;
}
private IntPtr RecieveBind(IntPtr socket, int len)
{
var buffer = Marshal.AllocHGlobal(len);
allocatedMemory.Add(buffer);
var result = recv(socket, buffer, len, 0);
if (result == -1)
{
HookManager.Log("Error3: " + WSAGetLastError());
return IntPtr.Zero;
}
if (result == 0)
return buffer;
if (result != 10)
{
HookManager.Log("Proxy: Bad response from server");
return IntPtr.Zero;
}
return buffer;
}
private void SetAddr(IntPtr socket, IntPtr addr, string ip, string port)
{
sockaddr_in structure = (sockaddr_in)Marshal.PtrToStructure(addr, typeof(sockaddr_in));
string originalip = new System.Net.IPAddress(structure.sin_addr.S_addr).ToString();
ushort originalport = ntohs(structure.sin_port);
structure.sin_addr.S_addr = inet_addr(ip);
structure.sin_port = htons(Convert.ToUInt16(port));
Marshal.StructureToPtr(structure, addr, true);
structure = (sockaddr_in)Marshal.PtrToStructure(addr, typeof(sockaddr_in));
}
private IntPtr SetUpSocks5Request()
{
var initialRequest = Marshal.AllocHGlobal(4);
Marshal.WriteByte(initialRequest, Convert.ToByte(5));
Marshal.WriteByte(initialRequest + 1, Convert.ToByte(2));
Marshal.WriteByte(initialRequest + 2, Convert.ToByte(0));
Marshal.WriteByte(initialRequest + 3, Convert.ToByte(2));
return initialRequest;
}
private IntPtr SetUpAuthenticateRequest(string username, string password, out int index)
{
index = 0;
var size = 3 + Encoding.Default.GetBytes(username).Length + Encoding.Default.GetBytes(password).Length;
var authenticateBuffer = Marshal.AllocHGlobal(size);
Marshal.WriteByte(authenticateBuffer + index++, Convert.ToByte(1));
Marshal.WriteByte(authenticateBuffer + index++, Convert.ToByte(username.Length));
byte[] rawBytes;
if (username.Length > 0)
{
rawBytes = Encoding.Default.GetBytes(username);
for (int i = 0; i < rawBytes.Length; i++)
{
Marshal.WriteByte(authenticateBuffer + index++, rawBytes[i]);
}
}
Marshal.WriteByte(authenticateBuffer + index++, Convert.ToByte(password.Length));
if (password.Length > 0)
{
rawBytes = Encoding.Default.GetBytes(password);
for (int i = 0; i < rawBytes.Length; i++)
{
Marshal.WriteByte(authenticateBuffer + index++, rawBytes[i]);
}
}
return authenticateBuffer;
}
private IntPtr SetUpBindWithRemoteHost(string eveIP, ushort evePort)
{
var bindWithEveBuffer = Marshal.AllocHGlobal(10);
var iplist = eveIP.Split('.').ToList();
byte[] portbyte = BitConverter.GetBytes(evePort).Reverse().ToArray();
byte[] newbyte = new byte[2];
int indexy = 0;
foreach (var byty in portbyte)
{
newbyte[indexy] = byty;
indexy++;
}
// bind with remote server
Marshal.WriteByte(bindWithEveBuffer, Convert.ToByte(5));
Marshal.WriteByte(bindWithEveBuffer + 1, Convert.ToByte(1));
Marshal.WriteByte(bindWithEveBuffer + 2, Convert.ToByte(0));
Marshal.WriteByte(bindWithEveBuffer + 3, Convert.ToByte(1));
Marshal.WriteByte(bindWithEveBuffer + 4, Convert.ToByte(iplist[0]));
Marshal.WriteByte(bindWithEveBuffer + 5, Convert.ToByte(iplist[1]));
Marshal.WriteByte(bindWithEveBuffer + 6, Convert.ToByte(iplist[2]));
Marshal.WriteByte(bindWithEveBuffer + 7, Convert.ToByte(iplist[3]));
Marshal.WriteByte(bindWithEveBuffer + 8, newbyte[0]);
Marshal.WriteByte(bindWithEveBuffer + 9, newbyte[1]);
return bindWithEveBuffer;
}
private bool VerifyBindResponse(IntPtr buffer)
{
var recvBytes = new byte[10] { Marshal.ReadByte(buffer), Marshal.ReadByte(buffer, 1), Marshal.ReadByte(buffer, 2), Marshal.ReadByte(buffer, 3), Marshal.ReadByte(buffer, 4), Marshal.ReadByte(buffer, 5), Marshal.ReadByte(buffer, 6), Marshal.ReadByte(buffer, 7), Marshal.ReadByte(buffer, 8), Marshal.ReadByte(buffer, 9) };
if (recvBytes[1] != 0)
{
if (recvBytes[1] == 1)
HookManager.Log("General failure");
if (recvBytes[1] == 2)
HookManager.Log("connection not allowed by ruleset");
if (recvBytes[1] == 3)
HookManager.Log("network unreachable");
if (recvBytes[1] == 4)
HookManager.Log("host unreachable");
if (recvBytes[1] == 5)
HookManager.Log("connection refused by destination host");
if (recvBytes[1] == 6)
HookManager.Log("TTL expired");
if (recvBytes[1] == 7)
HookManager.Log("command not supported / protocol error");
if (recvBytes[1] == 8)
HookManager.Log("address type not supported");
HookManager.Log("Proxy: Connection error binding eve server");
return false;
}
return true;
}
public void Dispose()
{
if (_hook == null)
return;
_hook.Dispose();
_hook = null;
}
}
}
