Web Proxy Problem


Lentdave67t

Thank you in advance for any help you can provide. I am writing a C# program
that checks to see if the URLs of favorites/bookmarks are still good. The
problem I am having is that while the program is checking the URLs, the text in
a label on the current window will not update until after all URLs are checked.
I think the Form/Window is frozen while the HTTP requests are occurring. Does
anyone know of a way I can update the Form in real time while checking the
URLs? A code snippet is below:

using System;
using System.Net;
using System.IO;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using System.Data;
using System.Diagnostics;
using System.Threading;

namespace URLChecker
{
    /// <summary>
    /// Summary description for Class1.
    /// </summary>
    public class URLClass
    {
        private OutputForm OutForm;

        public URLClass()
        {
            FileInfo[] fileNames; // FileInfo array

            OutForm = new OutputForm();

            OutForm.Show();
            OutForm.Focus();
            //GetFavoritesDirectory();
            fileNames = GetFileNames();
            CheckURL(fileNames);
        }

        public void GetFavoritesDirectory()
        {
            Stream myStream;
            OpenFileDialog openFileDialog1 = new OpenFileDialog();

            openFileDialog1.InitialDirectory = "c:\\Documents and Settings\\Owner";
            openFileDialog1.Filter = "txt files (*.txt)|*.txt|All files (*.*)|*.*";
            openFileDialog1.FilterIndex = 2;
            openFileDialog1.RestoreDirectory = true;

            if(openFileDialog1.ShowDialog() == DialogResult.OK)
            {
                if((myStream = openFileDialog1.OpenFile()) != null)
                {
                    // Insert code to read the stream here.
                    myStream.Close();
                }
            }
        }

        public void CheckURL(FileInfo[] sURL)
        {
            WebRequest wrGETURL;

            WebProxy myProxy = new WebProxy("myproxy", 80);
            myProxy.BypassProxyOnLocal = true;

            Stream objStream;

            foreach (FileInfo fiTemp in sURL)
            {
                if(fiTemp != null)
                {
                    string currentURL = null;

                    // ToDo: write code to extract the URL from fiTemp.Name
                    currentURL = ExtractURL(fiTemp.Name);
                    wrGETURL = WebRequest.Create(currentURL);
                    wrGETURL.Proxy = WebProxy.GetDefaultProxy();

                    // Try to make a GET request to the site.
                    // If the GET request was unsuccessful,
                    // tell the user the URL is bad and exit the program.

                    // ============== The line below is what will not update in real time
                    OutForm.CheckingWebSiteLabel.Text = fiTemp.Name;

                    try
                    {
                        objStream = wrGETURL.GetResponse().GetResponseStream();
                        StreamReader objReader = new StreamReader(objStream);

                        //Console.ReadLine();
                        //Console.WriteLine("{0} is good", fiTemp.Name);
                    }
                    catch(Exception q)
                    {
                        objStream = null;
                        //Console.WriteLine("{0} is bad", fiTemp.Name);
                        //System.Console.WriteLine(q);
                    }
                }
            }
            //Console.WriteLine("Finished checking");
        }

        // Return file names that end in .url
        public FileInfo[] GetFileNames()
        {
            String tempString = null;
            String tempURL = null;
            int j = 0;

            FileInfo[] FI = new FileInfo[250];
            // Create a reference to the Favorites directory.
            DirectoryInfo di = new DirectoryInfo("c:\\Documents and Settings\\Owner\\Favorites");

            // Create an array representing the files in that directory.
            FileInfo[] tempFI = di.GetFiles();

            // Skip all filenames that do not end in .url or
            // whose URL does not start with http://
            foreach(FileInfo i in tempFI)
            {
                tempString = i.ToString();

                if(tempString.EndsWith(".url"))
                {
                    tempURL = ExtractURL(tempString);
                    if(tempURL.StartsWith("http://"))
                    {
                        FI[j] = i;
                        j++;
                    }
                }
            }

            return FI; // Return the array of file names
        }

        // Open the file named "fileString" and extract the
        // URL from the second line after chopping off "BASEURL=".
        // The file is in the following format:
        // [Default]
        // BASEURL=http.....
        // [Internet Shortcut]
        // URL=http.....
        // Modified=......
        //
        // Note: The above can only be viewed using 'edit' from DOS.
        // If you use Notepad you will get a copy of the cached web page.
        public string ExtractURL(string fileString)
        {
            int x = 0;
            string[] input = new string[100];
            char[] trimChars = new char[8] {'B', 'A', 'S', 'E', 'U', 'R', 'L', '='};

            StreamReader sr = File.OpenText("c:\\Documents and Settings\\Owner\\Favorites\\" + fileString);

            while((input[x] = sr.ReadLine()) != null)
            {
                x++;
            }
            sr.Close();

            // Remove "BASEURL=" from the start of the string
            input[1] = input[1].TrimStart(trimChars);
            return input[1];
        }
    }
}
 

Joerg Jooss

Lentdave67t said:
Thank you in advance for any help you can provide. I am writing a C#
program that checks to see if the URLs of favorites/bookmarks are
still good. The problem I am having is that while the program is
checking the URLs, the text in a label on the current window will not
update until after all URLs are checked. I think the Form/Window is
frozen while the HTTP requests are occurring. Does anyone know of a
way I can update the Form in real time while checking the URLs?

You have to introduce a worker thread that handles the HTTP stuff. The easiest
approach IMHO is to use asynchronous I/O, so instead of calling
GetResponse(), you call BeginGetResponse()/EndGetResponse(). See
http://msdn.microsoft.com/library/d...thttpwebrequestclassbegingetresponsetopic.asp
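
For illustration, here is a minimal sketch of that asynchronous approach. It reuses
the OutputForm and CheckingWebSiteLabel names from the original post; the class and
method names (AsyncURLCheck, Check, OnResponse, UpdateLabel) are invented for the
example. BeginGetResponse() returns immediately, so the UI thread keeps pumping
messages, and because the callback runs on a thread-pool thread, the label update is
marshalled back to the UI thread with Control.Invoke:

using System;
using System.Net;
using System.Windows.Forms;

namespace URLChecker
{
    public class AsyncURLCheck
    {
        // Delegate used to marshal label updates back to the UI thread.
        private delegate void SetTextHandler(string text);

        private OutputForm outForm; // the form from the original post

        public AsyncURLCheck(OutputForm form)
        {
            outForm = form;
        }

        // Starts a non-blocking request and returns immediately, so the
        // form keeps processing paint and input messages.
        public void Check(string url)
        {
            WebRequest request = WebRequest.Create(url);
            request.BeginGetResponse(new AsyncCallback(OnResponse), request);
        }

        // Called on a thread-pool thread when the response (or an error) arrives.
        private void OnResponse(IAsyncResult result)
        {
            WebRequest request = (WebRequest) result.AsyncState;
            string status;

            try
            {
                WebResponse response = request.EndGetResponse(result);
                response.Close();
                status = request.RequestUri + " is good";
            }
            catch(Exception)
            {
                status = request.RequestUri + " is bad";
            }

            // Windows Forms controls may only be touched from the thread that
            // created them, so hand the label update over to the UI thread.
            outForm.Invoke(new SetTextHandler(UpdateLabel), new object[] { status });
        }

        private void UpdateLabel(string text)
        {
            outForm.CheckingWebSiteLabel.Text = text;
        }
    }
}

Assigning CheckingWebSiteLabel.Text directly from the callback thread may appear to
work, but controls are not thread-safe, so routing the update through Invoke is the
safe pattern.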

Cheers,
 
