Performance problem: C# vs C++
-
I wrote the same code in both Visual C++ and C#. In both cases the optimization switch is turned on. The results: around 300 ms in C#, less than 1 ms in C++. What is happening here? In general I need to process 4 MB every 250 ms, and it looks like C# is not the answer... or maybe there is another optimization switch in C# I am not aware of (besides the one in the Build tab). Can someone please advise? Is it true that C# is not good for real time?

The code in Visual Studio 2005 C#:
===================================================================================

using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics;

namespace ConsoleApplication10
{
    class Program
    {
        static void Main(string[] args)
        {
            byte[] byte1 = new byte[2097152 * 2];   // 4 MB buffer
            byte[] byte2 = new byte[2097152 * 2];

            Stopwatch sw = new Stopwatch();
            sw.Start();

            for (int i = 0; i < 2097152 * 2; i++)
            {
                for (int y = 0; y < 64; y++)
                {
                    // Note: this only ever compares the first 64 bytes;
                    // the outer index i is never used in the comparison.
                    if (byte1[y] == byte2[y])
                    //if (*(bytes1+i)==*(bytes2+i))
                    {
                    }
                }
            }

            sw.Stop();
            Console.Write("{0}", sw.ElapsedMilliseconds);
            Console.ReadKey();
        }
    }
}

The code in Visual C++ 2005:
===============================

#include "stdafx.h"

#using <System.dll>
#using <System.Drawing.dll>
#using <System.Windows.Forms.dll>

using namespace System;
using namespace System::Diagnostics;
using namespace System::Windows::Forms;
using namespace System::Drawing;
using namespace System::Drawing::Drawing2D;

int _tmain(int argc, _TCHAR* argv[])
{
    char *bytes1 = new char[2097152 * 2];   // 4 MB buffer
    char *bytes2 = new char[2097152 * 2];

    Stopwatch^ stopWatch = gcnew Stopwatch;
    stopWatch->Start();

    //for (int u = 0; u < 10000; u++)
    for (int i = 0; i < 2097152 * 2; i++)
    {
        for (int y = 0; y < 64; y++)
        {
            // Same comparison as the C# version: only the first 64 bytes.
            if (bytes1[y] == bytes2[y])
            //if (*(bytes1+i)==*(bytes2+i))
            {
            }
        }
    }

    // Stop before reading the elapsed time.
    stopWatch->Stop();
    long ii = (long)stopWatch->ElapsedMilliseconds;
    printf("%d", ii);

    delete[] bytes1;
    delete[] bytes2;
    return 0;
}
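For reference, here is a minimal sketch of the loop I think I actually need: it counts mismatches so the comparison has an observable result, since I suspect the C++ optimizer may simply be deleting the empty if-body above (the buffer names and the mismatch counter are just illustrative, not my real data):

===============================

using System;
using System.Diagnostics;

class CompareBenchmark
{
    static void Main()
    {
        const int size = 2097152 * 2;        // 4 MB, same as above
        byte[] buf1 = new byte[size];
        byte[] buf2 = new byte[size];

        Stopwatch sw = Stopwatch.StartNew();

        // Accumulate the result so the optimizer cannot discard the loop.
        int mismatches = 0;
        for (int i = 0; i < size; i++)
        {
            if (buf1[i] != buf2[i])
            {
                mismatches++;
            }
        }

        sw.Stop();
        Console.WriteLine("{0} mismatches in {1} ms",
                          mismatches, sw.ElapsedMilliseconds);
    }
}

With the result actually consumed, both the C# and the C++ timings should reflect the real cost of touching 4 MB, which is what matters for my 250 ms budget.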