Hi,
I have experienced low performance using the System.Net.Sockets.Socket object:
1. An unmanaged application that uses I/O completion ports performs about 1.33x better than the same app written in C# using the Begin*/End* APIs.
2. I did an additional test using .NET sockets: I created an application that uses the blocking interface of the .NET Socket to receive data ( Socket.Receive ). Surprisingly, the 'blocking' application performed about 15% better than the application that uses async I/O, even though the performance monitor showed that the async I/O app produced fewer context switches and less interop marshaling... Why is that happening? Why does the blocking 'server' give better performance than the async server?
Note that I used the same client for both of the servers.
Attached you will find the server/client code used to produce the results. I have tried to make the code as compact as possible... it is a single executable that runs as both client and server ( according to command-line arguments ), and the test was done on the same computer so the network is not the bottleneck. Following is the actual code (just cut and paste it into a console application):
using System;
using System.Net;
using System.Net.Sockets;
using System.Threading;

namespace SocketTester
{
    class Program
    {
        static protected byte[] m_btBuffer = new byte[1024 * 30];
        static protected Socket m_Socket = null;
        static protected Socket m_socClient = null;
        static protected long m_lEndTime = 0;
        static protected long m_lDataReceived = 0;
        static protected AsyncCallback m_ReceptionDelegate = null;

        // Completion callback for the async listener: tally the bytes received,
        // then either re-issue the receive or signal the main thread on EOF.
        static protected void ReceptionCallback(IAsyncResult ar)
        {
            long iReceivedAmount = m_socClient.EndReceive(ar);
            m_lDataReceived += iReceivedAmount;
            if (0 == iReceivedAmount) // 0 bytes => the client closed the connection
            {
                m_lEndTime = Environment.TickCount;
                ((EventWaitHandle)ar.AsyncState).Set();
                return;
            }
            m_socClient.BeginReceive(m_btBuffer, 0, m_btBuffer.Length, SocketFlags.None, m_ReceptionDelegate, ar.AsyncState);
        }

        static void Main(string[] args)
        {
            if ('l' == args[0][0]) // Listener
            {
                m_Socket = new Socket(AddressFamily.InterNetwork
                                      , SocketType.Stream
                                      , ProtocolType.Tcp);
                m_Socket.Bind(new IPEndPoint(IPAddress.Any, 5001));
                m_Socket.Listen(10);
                m_socClient = m_Socket.Accept();
                Console.WriteLine("Session Started...");
                int lTime = Environment.TickCount;
                int iBytesRead = 0;
                if ('s' == args[0][1]) // [L]istener [S]ynchronous
                {
                    // Blocking receive loop: Receive returns 0 when the client disconnects.
                    while (0 != (iBytesRead = m_socClient.Receive(m_btBuffer)))
                        m_lDataReceived += iBytesRead;
                    m_lEndTime = Environment.TickCount;
                }
                else if ('a' == args[0][1]) // [L]istener [A]synchronous
                {
                    // Async receive: the callback chain keeps re-posting BeginReceive
                    // until EOF, then signals this event.
                    EventWaitHandle evntCompletion = new EventWaitHandle(false, EventResetMode.ManualReset);
                    m_ReceptionDelegate = new AsyncCallback(ReceptionCallback);
                    m_socClient.BeginReceive(m_btBuffer, 0, m_btBuffer.Length, SocketFlags.None, m_ReceptionDelegate, evntCompletion);
                    evntCompletion.WaitOne();
                }
                float fDuration = (m_lEndTime - lTime) / 1000.0f;
                Console.WriteLine("Duration: {0}sec", fDuration);
                Console.WriteLine("Throughput: {0}MB/sec", (m_lDataReceived / (1000 * 1000)) / fDuration);
                Console.WriteLine("Session terminated...");
            }
            else if ("c" == args[0]) // [C]lient, data originator.
            {
                // Send args[1] megabytes of zeroed data to the local listener.
                TcpClient Client = new TcpClient("127.0.0.1", 5001);
                NetworkStream Stream = Client.GetStream();
                int iAmount = (int.Parse(args[1]) * 1000 * 1000) / m_btBuffer.Length;
                for (int i = 0; i < iAmount; i++)
                    Stream.Write(m_btBuffer, 0, m_btBuffer.Length);
                Stream.Close();
                Client.Close();
            }
            else
            {
                Console.WriteLine("SocketTester.exe c <MB> - Client");
                Console.WriteLine("                 la     - Listener, Async");
                Console.WriteLine("                 ls     - Listener, Sync");
            }
        }
    }
}
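For reference, assuming the above compiles as SocketTester.exe, a run on a single machine looks like this (the client's second argument is the number of megabytes to send, as parsed by int.Parse(args[1]); 100 here is just an example value):

SocketTester.exe ls      <- console 1: start the blocking listener ("la" for the async one)
SocketTester.exe c 100   <- console 2: send 100 MB to the listener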
Nadav
http://www.ddevel.com