author     David E. O'Brien <obrien@FreeBSD.org>    2005-01-14 02:44:47 +0000
committer  David E. O'Brien <obrien@FreeBSD.org>    2005-01-14 02:44:47 +0000
commit     d89c8acb7845899bf96f61c759e7aca3c5a2cabe (patch)
tree       5369160be41ae692cc7a0df8ee68456ba9a6d9e0 /misc/bidwatcher
parent     8559e27d95822252967f370d14fdce4b972374e5 (diff)
download   ports-d89c8acb7845899bf96f61c759e7aca3c5a2cabe.tar.gz
           ports-d89c8acb7845899bf96f61c759e7aca3c5a2cabe.zip
Diffstat (limited to 'misc/bidwatcher')
-rw-r--r--  misc/bidwatcher/Makefile                    |  10
-rw-r--r--  misc/bidwatcher/files/patch-Makefile.in     |   5
-rw-r--r--  misc/bidwatcher/files/patch-bidwatcher.cpp  | 294
-rw-r--r--  misc/bidwatcher/files/patch-bidwatcher.h    |  36
-rw-r--r--  misc/bidwatcher/files/patch-netstuff.cpp    | 646
5 files changed, 985 insertions, 6 deletions
diff --git a/misc/bidwatcher/Makefile b/misc/bidwatcher/Makefile
index a7ef976084b0..d1cedd82934d 100644
--- a/misc/bidwatcher/Makefile
+++ b/misc/bidwatcher/Makefile
@@ -8,18 +8,20 @@
PORTNAME= bidwatcher
PORTVERSION= 1.3.16
-PORTREVISION?= 0
+PORTREVISION?= 1
CATEGORIES= misc
MASTER_SITES= ${MASTER_SITE_SOURCEFORGE}
MASTER_SITE_SUBDIR= ${PORTNAME}
-PATCH_SITES= http://sourceforge.net/tracker/download.php?group_id=2973&atid=302973&file_id=63934&aid=820963/
-PATCHFILES= add_auction_by_url_also.patch
-PATCH_DIST_STRIP= -p1
+#PATCH_SITES= http://sourceforge.net/tracker/download.php?group_id=2973&atid=302973&file_id=112451&aid=1085302
+#PATCHFILES= bidwatcher-1.3.16-SSL_with_libcurl.02.patch
+#PATCH_DIST_STRIP= -p1
MAINTAINER= mph@FreeBSD.org
COMMENT= Bid monitor for eBay
+LIB_DEPENDS= curl.3:${PORTSDIR}/ftp/curl
+
PLIST_FILES= bin/bidwatcher
USE_GNOME= gtk12
GNU_CONFIGURE= YES
diff --git a/misc/bidwatcher/files/patch-Makefile.in b/misc/bidwatcher/files/patch-Makefile.in
new file mode 100644
index 000000000000..f2c9a5f37b62
--- /dev/null
+++ b/misc/bidwatcher/files/patch-Makefile.in
@@ -0,0 +1,5 @@
+--- Makefile.in.orig Mon Aug 30 18:30:57 2004
++++ Makefile.in Thu Jan 13 18:23:18 2005
+@@ -98 +98 @@
+-LIBS = @LIBS@
++LIBS = @LIBS@ -lcurl
diff --git a/misc/bidwatcher/files/patch-bidwatcher.cpp b/misc/bidwatcher/files/patch-bidwatcher.cpp
index c38826f31fe2..f6dacea8e5ed 100644
--- a/misc/bidwatcher/files/patch-bidwatcher.cpp
+++ b/misc/bidwatcher/files/patch-bidwatcher.cpp
@@ -1,7 +1,297 @@
--- bidwatcher.cpp.orig Mon Aug 30 18:03:34 2004
-+++ bidwatcher.cpp Sat Oct 30 20:00:20 2004
-@@ -2972,2 +2972,2 @@
++++ bidwatcher.cpp Thu Jan 13 18:31:11 2005
+@@ -29,12 +29,12 @@
+ #include <time.h>
+ #include <sys/types.h>
+ #include <sys/stat.h>
+-#include <sys/signal.h>
+-#include <ctype.h>
++//#include <sys/signal.h>
++//#include <ctype.h>
+ #include <sys/time.h>
+-#include <sys/socket.h>
+-#include <stdlib.h>
+-#include <netdb.h>
++//#include <sys/socket.h>
++//#include <stdlib.h>
++//#include <netdb.h>
+
+ // Handle strict Ansi compliance with gcc 3.1 while retaining
+ // backward compatibility.
+@@ -595,6 +595,40 @@
+ delete obj;
+ }
+
++int dosignin() {
++
++ URL *bidurl;
++
++ char *Buff;
++ int returnVal;
++ char *ustring, *vstring;
++ char cookiejar[200];
++ sprintf(cookiejar, "%s/%s/cookies.txt", getenv("HOME"), bw_subdir);
++
++ /* Sign in First */
++ bidurl = new URL("http://signin.ebay.com/ws2/eBayISAPI.dll?SignIn",
++ "",proxyurl);
++ returnVal = fetchURL(bidurl, 0, &Buff, cookiejar, 2);
++
++ delete(bidurl);
++
++ ustring = g_strdup_printf("https://signin.ebay.com/ws/eBayISAPI.dll"
++ "?co_partnerid=2&amp;siteid=0&amp;UsingSSL=1");
++ vstring = g_strdup_printf("MfcISAPICommand=SignInWelcome"
++ "&siteid=0&co_partnerId=2&UsingSSL=1"
++ "&ru=&pp=&pa1=&pa2=&pa3=&i1=-1&pageType=-1"
++ "&userid=%s&pass=%s&keepMeSignInOption=1",
++ authID, authPASS);
++
++ bidurl = new URL(ustring, vstring, proxyurl);
++ g_free(ustring);
++ g_free(vstring);
++
++ returnVal = fetchURL(bidurl, 1, &Buff, cookiejar, 0);
++
++ return returnVal;
++}
++
+
+ void auctioninfo::getkey(float bid, int quantity) {
+ char *Buff;
+@@ -602,9 +636,13 @@
+ snipeQty=quantity;
+ myBidAmount=bid;
+ myBidQuantity=quantity;
+- //printf("myBidAmount set to %f\n",bid);
++ int returnVal;
++
++ char *ustring, *vstring;
++ char cookiejar[200];
++ sprintf(cookiejar, "%s/%s/cookies.txt", getenv("HOME"), bw_subdir);
+
+- char *ustring;
++ dosignin();
+
+ /* eBay has gotten pickier about URLs. If you don't specify
+ * ebaymotors then you get some unknown error, yet the bid
+@@ -614,6 +652,7 @@
+ * key URL. This should change so we don't look suspicious.
+ */
+
++#if 0
+ (AuctionType == TYPE_EBAYMOTORSCAR || AuctionType == TYPE_EBAYMOTORS) ?
+ ustring = g_strdup_printf("http://cgi.ebay.com/ebaymotors/"
+ "aw-cgi/eBayISAPI.dll"
+@@ -625,14 +664,17 @@
+ "?MfcISAPICommand=MakeBid"
+ "&item=%llu&maxbid=%.2f&quant=%d",
+ ItemNumber,bid,quantity);
++#endif
++
++ ustring = g_strdup_printf("http://offer.ebay.com/ws/eBayISAPI.dll");
++ vstring = g_strdup_printf("MfcISAPICommand=MakeBid&fp=2&co_partnerid="
++ "&item=%llu&maxbid=%.2f&quant=%d",
++ ItemNumber,bid,quantity);
++
++ bidurl = new URL(ustring, vstring, proxyurl);
+
+- /* Lets keep the URL fresh. */
+- if (bidurl)
+- delete(bidurl);
+- bidurl = new URL(ustring, proxyurl);
+- g_free(ustring);
++ returnVal = fetchURL(bidurl, 1, &Buff, cookiejar, 0);
+
+- int returnVal = fetchURL(bidurl, 1, &Buff, TIMEOUT, 0);
+ if (returnVal != NET_SUCCESS) {
+ if (returnVal == NET_NETERROR)
+ showError("Could not obtain bid key: a network error occurred.");
+@@ -698,21 +740,18 @@
+ char *Buff;
+ char lineBuff[1024];
+ int returnVal,retval;
+- char *url=g_strdup(bidurl->url);
+- char *end=strstr(url,"?");
+- end[1]='\0';
++ char cookiejar[200];
++ sprintf(cookiejar, "%s/%s/cookies.txt", getenv("HOME"), bw_subdir);
+
+- sprintf(bidurl->url, "%sMfcISAPICommand=MakeBid&item=%llu&key=%s"
++ sprintf(bidurl->fullurlpost, "MfcISAPICommand=MakeBid&item=%llu&key=%s"
+ "&user=%s&pass=%s&maxbid=%.2f&quant=%d"
+ "&acceptbinbid=0&mode=1&userid=default&javascriptenabled=1",
+- url, ItemNumber, snipeKey, authID, authPASS, snipeAmount,
++ ItemNumber, snipeKey, authID, authPASS, snipeAmount,
+ snipeQty);
+
+- DPRINTF(DHIGH, ("Bid(%s)\n", bidurl->url));
++ DPRINTF(DHIGH, ("Bid(%s?%s)\n", bidurl->fullurl, bidurl->fullurlpost));
+
+- g_free(url);
+-
+- retval = fetchURL(bidurl, 1, &Buff, TIMEOUT, 0);
++ retval = fetchURL(bidurl, 1, &Buff, cookiejar, 0);
+
+ if (retval != NET_SUCCESS) {
+ sprintf(lineBuff,"Bid on %llu FAILED: Error %d connecting to eBay",
+@@ -949,6 +988,9 @@
+
+ void DoEmail(char * name, char * auctionid)
+ {
++ char cookiejar[200];
++ sprintf(cookiejar, "%s/%s/cookies.txt", getenv("HOME"), bw_subdir);
++
+ if (strlen(name) < 2) {
+ showError("No name to look up");
+ return;
+@@ -972,8 +1014,8 @@
+ sprintf(WebPage,"http://contact.ebay.com/aw-cgi/eBayISAPI.dll?"
+ "ReturnUserEmail&requested=%s&userid=%s&pass=%s&iid=%s",
+ name,authID,authPASS,auctionid);
+- URL *emailurl=new URL(WebPage,proxyurl);
+- int err = fetchURL(emailurl, 0, &Buff, TIMEOUT, 0);
++ URL *emailurl=new URL(WebPage,"",proxyurl);
++ int err = fetchURL(emailurl, 0, &Buff, cookiejar, 0);
+ delete(emailurl);
+
+ if ((err == NET_NETERROR) || (err == NET_TIMEOUT)
+@@ -1101,7 +1143,7 @@
+
+ if (strlen(proxystring) > 2) {
+ char *pxy = g_strdup_printf("http://%s/",proxystring);
+- proxyurl = new URL(pxy, NULL, proxyuserstring, proxypassstring);
++ proxyurl = new URL(pxy, "", NULL, proxyuserstring, proxypassstring);
+ g_free(pxy);
+ }
+ else proxyurl = NULL;
+@@ -2969,8 +3011,8 @@
+ browserPATH, url);
+ system(commandLine);
+ }
- else if (strstr(browserPATH, "mozilla") != NULL) {
- // Try to find mozilla.
+ else if (strcasestr(browserPATH, "mozilla") != NULL || strcasestr(browserPATH, "fire") != NULL) {
+ // Try to find mozilla, firefox, or firebird.
+
+ sprintf(commandLine,
+ "exec %s -remote \"openURL(%s,new-window)\"",
+@@ -3636,7 +3678,7 @@
+
+ if (strlen(proxystring) > 2) {
+ char *pxy = g_strdup_printf("http://%s/", proxystring);
+- proxyurl = new URL(pxy, NULL, proxyuserstring, proxypassstring);
++ proxyurl = new URL(pxy, "", NULL, proxyuserstring, proxypassstring);
+ g_free(pxy);
+ } else {
+ proxyurl = NULL;
+@@ -4076,37 +4118,6 @@
+ CHECK;
+ }
+
+-/*
+-gint getUserAuctions()
+-{
+- char address[1024];
+- char *Buff;
+- int ret;
+-
+- showStatus("Checking My eBay");
+-
+- sprintf(address, "http://signin.ebay.com/aw-cgi/eBayISAPI.dll?"
+- "MfcISAPICommand=SignInWelcome&siteid=0&co_partnerId=2"
+- "&UsingSSL=0&ru=http%%3A%%2F%%2Fcgi1.ebay.com%%2Faw-cgi%%2F"
+- "eBayISAPI.dll%%3FMyEbayAll%%26first%%3DN%%26dayssince%%3D2"
+- "%%26p1%%3D0%%26p2%%3D0%%26p3%%3D0%%26p4%%3D0%%26p5%%3D0%%26"
+- "rows%%3D25%%26pagebid%%3D1%%26pagewon%%3D1%%26pagesell%%3D1"
+- "%%26pagesold%3D1%%26pageunsold%%3D1%%26pagelost%%3D1%%26"
+- "page%%3D1%%26all%%3D1%%26SaveSetting%%3D362413158%%26pass"
+- "%%3D%%7B_pass_%%7D%%26userid%%3D&pp=pass&pa1=&pa2=&pa3=&"
+- "i1=-1&pageType=368&userid=%s&pass=%s", authID, authPASS);
+-
+- URL *myurl = new URL(address, proxyurl);
+- ret = fetchURL(myurl, 1, &Buff, TIMEOUT, 0);
+- delete(myurl);
+-
+- printf("BUFF: %s\n", Buff);
+-
+-
+- free(Buff);
+-}
+-*/
+-
+ //////////////////////////////////////////////////////////////////////////////
+ //
+ // GetUserAuctions - grabs users auctions from ebay and adds them to
+@@ -4126,6 +4137,8 @@
+ char WebPage[200];
+ int returnVal;
+ unsigned long long *ids;
++ char cookiejar[200];
++ sprintf(cookiejar, "%s/%s/cookies.txt", getenv("HOME"), bw_subdir);
+
+ if (!trackBids) return GUA_SUCCESS;
+ if (strlen(authID) < 2) return GUA_BADUSER;
+@@ -4136,8 +4149,8 @@
+ sprintf(WebPage, "http://cgi.ebay.com/aw-cgi/eBayISAPI.dll?"
+ "MfcISAPICommand=ViewBidItems&userid=%s&completed=0&all=1&rows=200",// items the user has bid on
+ authID);
+- URL *bidsurl = new URL(WebPage, proxyurl);
+- returnVal = fetchURL(bidsurl, 0, &Buff, TIMEOUT, 0);
++ URL *bidsurl = new URL(WebPage, "", proxyurl);
++ returnVal = fetchURL(bidsurl, 0, &Buff, cookiejar, 0);
+ delete(bidsurl);
+ blackLED();
+ showStatus("");
+@@ -4221,8 +4234,8 @@
+ "http://cgi6.ebay.com/ws/eBayISAPI.dll?ViewSellersOtherItems&userid=%s&sort=3&completed=0&since=-1&rd=1",
+ authID);
+
+- URL *listingurl = new URL(WebPage, proxyurl);
+- returnVal = fetchURL(listingurl, 0, &Buff, TIMEOUT, 0);
++ URL *listingurl = new URL(WebPage, "", proxyurl);
++ returnVal = fetchURL(listingurl, 0, &Buff, NULL, 0);
+ delete listingurl;
+ blackLED();
+ showStatus("");
+@@ -4415,18 +4428,18 @@
+ delete(infourl);
+ infourl = NULL;
+ }
+- infourl = new URL(urlstring, proxyurl);
++ infourl = new URL(urlstring, "", proxyurl);
+
+ greenLED();
+- returnVal = fetchURL(infourl, 0, &HtmlBuff, TIMEOUT, 0);
++ returnVal = fetchURL(infourl, 0, &HtmlBuff, NULL, 0);
+
+ if (returnVal == NET_NETERROR || returnVal == NET_TIMEOUT) {
+ // maybe proxy settings changed
+ if (infourl != NULL)
+ delete(infourl);
+- infourl = new URL(urlstring, proxyurl);
++ infourl = new URL(urlstring, "", proxyurl);
+
+- returnVal = fetchURL(infourl, 0, &HtmlBuff, TIMEOUT, 0);
++ returnVal = fetchURL(infourl, 0, &HtmlBuff, NULL, 0);
+
+ if (returnVal == NET_NETERROR || returnVal == NET_TIMEOUT) {
+ // Ok, so it probably is borked.
+@@ -4529,7 +4542,7 @@
+ gettimeofday(&tm_start, NULL);
+
+ resetTimeSyncURL();
+- returnVal = fetchURL(timesyncurl, 0, &HtmlBuff, TIMEOUT, 0);
++ returnVal = fetchURL(timesyncurl, 0, &HtmlBuff, NULL, 0);
+
+ gettimeofday(&tm_end, NULL);
+ t1 = (tm_end.tv_sec + 0.000001 * tm_end.tv_usec)
+@@ -4692,7 +4705,7 @@
+ }
+
+ timesyncurl = new URL("http://cgi3.ebay.com/aw-cgi/eBayISAPI.dll?TimeShow",
+- proxyurl);
++ "",proxyurl);
+ }
+
+ /////////////////////////////////////////////////////////////////////////////
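
The patched bidwatcher.cpp above replaces the hand-rolled HTTP sign-in with a cookie-based flow: dosignin() first fetches the eBay sign-in page to seed a cookie jar under the user's bidwatcher directory, then POSTs the credentials over SSL while reusing the same jar, and getkey()/bidding reuse those cookies. The standalone sketch below shows that two-step pattern with nothing but libcurl; the URLs, form fields, credentials, and jar path are placeholders, not values taken from the port.

#include <curl/curl.h>

// Discard the response body so the sketch stays quiet on stdout.
static size_t discard_cb(void *, size_t size, size_t nmemb, void *)
{
  return size * nmemb;
}

static int fetch(const char *url, const char *postfields, const char *jar)
{
  CURL *h = curl_easy_init();
  if (h == NULL)
    return 1;
  curl_easy_setopt(h, CURLOPT_URL, url);
  curl_easy_setopt(h, CURLOPT_COOKIEFILE, jar);   // read cookies from the jar
  curl_easy_setopt(h, CURLOPT_COOKIEJAR, jar);    // write cookies back on cleanup
  curl_easy_setopt(h, CURLOPT_FOLLOWLOCATION, 1L);
  curl_easy_setopt(h, CURLOPT_WRITEFUNCTION, discard_cb);
  if (postfields != NULL)
    curl_easy_setopt(h, CURLOPT_POSTFIELDS, postfields);  // switches the request to POST
  CURLcode rc = curl_easy_perform(h);
  curl_easy_cleanup(h);              // the cookie jar is flushed to disk here
  return rc == CURLE_OK ? 0 : 1;
}

int main()
{
  const char *jar = "/tmp/cookies.txt";           // placeholder cookie-jar path
  curl_global_init(CURL_GLOBAL_DEFAULT);

  // Step 1: plain GET of the sign-in page, only to collect session cookies.
  fetch("https://signin.example.com/SignIn", NULL, jar);

  // Step 2: POST the login form over SSL, reusing the cookies from step 1.
  fetch("https://signin.example.com/SignIn",
        "userid=USER&pass=PASS&UsingSSL=1", jar);

  curl_global_cleanup();
  return 0;
}
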
diff --git a/misc/bidwatcher/files/patch-bidwatcher.h b/misc/bidwatcher/files/patch-bidwatcher.h
new file mode 100644
index 000000000000..e53469759aad
--- /dev/null
+++ b/misc/bidwatcher/files/patch-bidwatcher.h
@@ -0,0 +1,36 @@
+diff -urN bidwatcher-1.3.16/bidwatcher.h bidwatcher-1.3.16-SSL-2/bidwatcher.h
+--- bidwatcher.h.orig 2004-08-31 02:40:13.000000000 +0200
++++ bidwatcher.h 2004-12-14 17:10:14.000000000 +0100
+@@ -122,19 +122,15 @@
+ #define array_elem(a) (sizeof(a)/sizeof(a[0]))
+
+ struct URL {
+- struct hostent *hostinfo;
+- unsigned int port;
+- char url[256], hoststring[256];
++ char fullurl[512], fullurlpost[512];
+ bool useproxy;
+- int numaliases;
+- int numaddrs;
+ char *proxyuser, *proxypass;
+
+ URL(); // since there is dynamic memory involved, be safe.
+- URL(char *newurl, URL *proxy);
+- URL(char *newurl, URL *proxy, char *user, char *pass);
++ URL(char *newurl, char *postval, URL *proxy);
++ URL(char *newurl, char *postval, URL *proxy, char *user, char *pass);
+ ~URL();
+- void create(char *newurl, URL *proxy);
++ void create(char *newurl, char *postval, URL *proxy);
+ };
+
+ //
+@@ -399,7 +395,7 @@
+ void translate_date(char *date);
+ bool GetAucInfo(char *Buff, struct auctioninfo *NewAuction);
+ int calcTimeDiff (char *buffer);
+-int fetchURL(URL *url, int Post, char **Buff, double timeOut, int redir);
++int fetchURL(URL *url, int Post, char **Buff, char *cookiejar, int redir);
+ char *stristr(const char *haystack, const char *needle);
+ char translate_special(const char *input, int *len);
+ char *getTimeStamp();
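
patch-bidwatcher.h reshapes the URL object to carry the full request URL plus an optional POST body, and swaps fetchURL's timeout argument for a cookie-jar path and a remaining-redirect count. A hypothetical caller inside the patched tree might look roughly like the sketch below; the URL, POST string, and jar path are made-up placeholders, and the snippet assumes the patched declarations from bidwatcher.h (URL, fetchURL, NET_SUCCESS).

#include <cstdio>
#include <cstdlib>
#include "bidwatcher.h"

int fetch_example(URL *proxy)
{
  char *buff = NULL;
  char jar[]  = "/tmp/cookies.txt";         // placeholder cookie-jar path
  char ustr[] = "http://example.com/form";  // placeholder request URL
  char pstr[] = "field=value";              // placeholder POST body ("" would mean GET)

  URL *u = new URL(ustr, pstr, proxy);

  // Second argument: 1 = POST.  Last argument is the remaining redirect
  // budget, which replaces the old timeout parameter.
  int rc = fetchURL(u, 1, &buff, jar, 2);
  delete u;

  if (rc != NET_SUCCESS)
    return rc;

  printf("%s\n", buff);
  free(buff);                               // fetchURL hands back a heap-allocated buffer
  return NET_SUCCESS;
}
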
diff --git a/misc/bidwatcher/files/patch-netstuff.cpp b/misc/bidwatcher/files/patch-netstuff.cpp
new file mode 100644
index 000000000000..36af3a5913bd
--- /dev/null
+++ b/misc/bidwatcher/files/patch-netstuff.cpp
@@ -0,0 +1,646 @@
+diff -urN bidwatcher-1.3.16/netstuff.cpp bidwatcher-1.3.16-SSL-2/netstuff.cpp
+--- netstuff.cpp.orig 2004-08-21 13:45:12.000000000 +0200
++++ netstuff.cpp 2004-12-14 17:09:44.000000000 +0100
+@@ -23,189 +23,73 @@
+ // GNU General Public License for more details.
+ //
+
+-#include <signal.h>
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <string.h>
+-#include <errno.h>
+-#include <fcntl.h>
+-#include <unistd.h>
+-#include <sys/time.h>
+-#include <time.h>
+-#include <netinet/in.h>
+-#include <sys/socket.h>
+-#include <sys/utsname.h>
+-#include <netdb.h>
+ #include <gtk/gtk.h>
++#include <curl/curl.h>
++#include <curl/types.h>
++#include <curl/easy.h>
+ #include "bidwatcher.h"
+
+-bool cancelPressed;
+-double avg_load_time = INITIAL_LOAD_TIME;
++// The next two functions are essentially taken from the examples
++// that come with the curl library. For speed we don't want to
++// start with an empty buffer, so memsize should be larger than size.
++
++struct MemoryBufferStruct {
++ char *memory; // This is the character array
++ size_t size; // This is the length of the string
++ size_t memsize; // This is the actual length of the array
++};
+
+-char cookie[512];
+-
+-void getAdultCookie(char * username, char * password) {
+- char *ustring = g_strdup_printf("http://cgi.ebay.com/aw-cgi/eBayISAPI.dll"
+- "?MfcISAPICommand=AdultLogin&userid=%s&"
+- "password=%s", username, password);
+- URL *url = new URL(ustring, proxyurl);
+- g_free(ustring);
+-
+- char *HtmlBuff;
+- fetchURL(url, 0, &HtmlBuff, TIMEOUT, 0);
+- char *adultcookie=strstr(HtmlBuff,"Set-Cookie");
+- if (adultcookie==NULL) {
+- fprintf(stderr, "adult cookie not found:\n");
+- fprintf(stderr, HtmlBuff);
+- free(HtmlBuff);
+- return;
++size_t WriteMemoryBuffer(void *ptr, size_t size, size_t nmemb, void *data)
++{
++ register int realsize = size * nmemb;
++ struct MemoryBufferStruct *mem = (struct MemoryBufferStruct *)data;
++ if( (size_t) (realsize) > (mem->memsize - mem->size) ) {
++ mem->memory = (char *)realloc(mem->memory, mem->size + realsize + 1);
++ mem->memsize = mem->size + realsize + 1;
+ }
+- int len=strcspn(adultcookie,";");
+- adultcookie[len]='\0';
+- printf("Adult cookie: %s\n",adultcookie+4); // strip "Set-"
+- strcpy(cookie,adultcookie+4);
+- free(HtmlBuff);
+-}
+-
+-/* necessary because otherwise, all hostents are identical, and overwritten
+- every time gethostbyname is called! :( */
+-int copyhostent(struct hostent *in, struct hostent *out,
+- int *numaliases, int *numaddrs) {
+-
+- if (in==NULL) return 0;
+-
+- // struct hostent *out = (struct hostent *) malloc(sizeof(struct hostent));
+- out->h_name = g_strdup(in->h_name);
+- int i;
+-
+- /* Count number of aliases */
+- for(i=0; in->h_aliases[i] != 0; i++);
+-
+- if (i > 0)
+- out->h_aliases = (char **) malloc(sizeof(char *) * i);
+-
+- *numaliases = i;
+-
+- for(i=0; in->h_aliases[i] != 0; i++)
+- out->h_aliases[i] = g_strdup(in->h_aliases[i]);
+-
+- out->h_addrtype = in->h_addrtype;
+- out->h_length = in->h_length;
+-
+- /* Count number of addresses */
+- for(i=0; in->h_addr_list[i] != 0; i++);
+-
+- if (i > 0)
+- out->h_addr_list = (char **) malloc(sizeof(char *) * i);
+-
+- *numaddrs = i;
+-
+- for(i=0; in->h_addr_list[i] != 0; i++) { // thanks Rodd Snook
+- out->h_addr_list[i] = (char *) malloc(sizeof(char) * in->h_length);
+- memcpy(out->h_addr_list[i], in->h_addr_list[i], in->h_length);
++ if (mem->memory) {
++ memcpy(&(mem->memory[mem->size]), ptr, realsize);
++ mem->size += realsize; // Update the string length
++ mem->memory[mem->size] = 0; // Terminate the string with a NULL
+ }
+-
+- return 1;
++ return realsize;
+ }
+
++struct MemoryBufferStruct curl_memory_chunk;
+
+-URL::URL(char *newurl, URL *proxy) {
++bool cancelPressed;
++double avg_load_time = INITIAL_LOAD_TIME;
++
++URL::URL(char *newurl, char *postval, URL *proxy) {
+ DPRINTF(DMED, ("New URL(%s).\n", newurl));
+- hostinfo = (struct hostent *) malloc(sizeof(struct hostent));
+- memset(hostinfo, 0, sizeof(struct hostent));
+- numaddrs = 0;
+- numaliases = 0;
+ proxyuser = NULL;
+ proxypass = NULL;
+- create(newurl, proxy);
++ create(newurl, postval, proxy);
+ }
+
+-URL::URL(char *newurl, URL *proxy, char *user, char *pass) {
++URL::URL(char *newurl, char *postval, URL *proxy, char *user, char *pass) {
+ DPRINTF(DMED, ("New URL(%s).\n", newurl));
+- hostinfo = (struct hostent *) malloc(sizeof(struct hostent));
+- memset(hostinfo, 0, sizeof(struct hostent));
+- numaddrs = 0;
+- numaliases = 0;
+ proxyuser = strdup(user);
+ proxypass = strdup(pass);
+- create(newurl, proxy);
++ create(newurl, postval, proxy);
+ }
+
+ URL::~URL() {
+- // Clean this memory waster up.
+- int i;
+- DPRINTF(DMED, ("Destroying URL(%s)\n", url));
+-
+- // If we are using a proxy, don't kill this.
+- if (!useproxy && hostinfo) {
+- if (hostinfo->h_name)
+- free(hostinfo->h_name);
+-
+- for(i=0; i < numaliases; i++)
+- free(hostinfo->h_aliases[i]);
+-
+- if (i > 0)
+- free(hostinfo->h_aliases);
+-
+- for(i=0; i < numaddrs; i++)
+- free(hostinfo->h_addr_list[i]);
+-
+- if (i > 0)
+- free(hostinfo->h_addr_list);
+-
+- free(hostinfo);
+- }
+-
++ DPRINTF(DMED, ("Destroying URL(%s)\n", fullurl));
+ if(proxyuser != NULL) free(proxyuser);
+ if(proxypass != NULL) free(proxypass);
+ }
+
+-void URL::create(char *newurl,URL *proxy) {
+- char *hostoff = strstr(newurl, "://") + 3;
++void URL::create(char *newurl, char *postval, URL *proxy) {
+
+- // Hmm, size_t isn't really an int, but this shouldn't affect us.
+- sprintf(hoststring, "%.*s", (int)strcspn(hostoff, "/"), hostoff);
++ sprintf(fullurl,"%s",newurl);
++ sprintf(fullurlpost,"%s",postval);
+
+ if (proxy == NULL) {
+- char tmpstr[256];
+-
+ useproxy = FALSE;
+-
+- strcpy(tmpstr, hoststring);
+- if (strlen(tmpstr) < strlen(hostoff)) {
+- strcpy(url, hostoff + strlen(tmpstr));
+- } else url[0] = '\0';
+-
+- char *portoff = strstr(tmpstr, ":");
+- if (portoff != NULL) {
+- port = atoi(portoff + 1);
+- portoff[0] = '\0';
+- } else port = 80;
+-
+-
+- if (hostinfo) {
+- // Destroy it before we malloc more.
+- int i;
+-
+- if (hostinfo->h_name)
+- g_free(hostinfo->h_name);
+-
+- for(i=0; i < numaliases; i++)
+- g_free(hostinfo->h_aliases[i]);
+-
+- if (numaliases > 0)
+- free(hostinfo->h_aliases);
+-
+- for(i=0; i < numaddrs; i++)
+- free(hostinfo->h_addr_list[i]);
+-
+- if (numaddrs > 0)
+- free(hostinfo->h_addr_list);
+-
+- //free(hostinfo);
+- }
+- copyhostent(gethostbyname(tmpstr), hostinfo, &numaliases, &numaddrs);
+-
+ } else {
+ #ifdef DEBUG_NETWORK
+ printf("Configured for proxy - host: %s, port: %d\n",
+@@ -213,317 +97,149 @@
+ proxy->port);
+ #endif
+ useproxy = TRUE;
+- hostinfo = proxy->hostinfo;
+- port = proxy->port;
+ proxyuser = strdup(proxy->proxyuser);
+ proxypass = strdup(proxy->proxypass);
+- strcpy(url, newurl);
+ }
+ }
+
+-void CloseSocket(int sockIT)
+-{
+- close(sockIT);
+-}
+-
+-int set_nonblock(int fd)
+-{
+- int flags = fcntl(fd, F_GETFL, 0);
+- if (flags == -1)
+- return -1;
+- return fcntl(fd, F_SETFL, flags | O_NONBLOCK);
+-}
+-
+ bool checkredir(URL *url, char *buff) {
+- if (strstr(buff,"HTTP/1.0 30")!=NULL || strstr(buff,"HTTP/1.1 30")!=NULL) {
+- char *loc = stristr(buff,"Location:");
+-
+- if (loc == NULL) {
+- showError("Redirection without Location header!\n");
+- return FALSE;
+- }
+- else
+- loc += 10; // strlen("Location: ");
+
+- loc[strcspn(loc,"\r\n ")]='\0';
++ // We are extracting the URL from a string of the form
++ // location: http://...
++ if (strstr(buff,"location: http") != NULL) {
++ char *loc = stristr(buff,"location: http");
++ loc = loc + 10;
++ loc[strcspn(loc,"\r\n")] = '\0';
++ sprintf(url->fullurl,"%s\n",loc);
++ return TRUE;
++ }
+
+- if (strstr(loc,"AdultLoginShow")) {
+-#ifdef DEBUG_NETWORK
+- printf("Getting adult cookie\n");
+-#endif
+- getAdultCookie(authID,authPASS);
+- if (strlen(cookie)<5) showError("Could not get adult cookie.");
+- } else url->create(loc,proxyurl);
+-#ifdef DEBUG_NETWORK
+- printf("Relocated to %s\n",loc);
+-#endif
++ // We are extracting the URL from a string of the form
++ // Location: http://...
++ if (strstr(buff,"Location: http") != NULL) {
++ char *loc = stristr(buff,"Location: http");
++ loc = loc + 10;
++ loc[strcspn(loc,"\r\n")] = '\0';
++ sprintf(url->fullurl,"%s\n",loc);
+ return TRUE;
+ }
+- else return FALSE;
+-}
+
+-int fetchURL(URL *url, int Post, char **Buff, double timeOut, int redir)
+-{
+- int buffLength,recerr;
+- int socketID;
+- char UserAgent[256];
+- char lineBuff[1024];
+- char htmlBuff[HUGEBUFF];
+
+- struct utsname utsbuf;
++ // We are extracting the URL from a string of the form
++ // <meta http-equiv="Refresh" content="0; url = http://...">
++ // First we seek to the end of the "url =" part, then start
++ // counting until we hit the ending " mark. Could we just
++ // link to a regular expression library instead?
+
+- struct sockaddr_in soc_in;
+- int err;
+- fd_set fds;
+- struct timeval tm;
++ if (strstr(buff,"<meta http-equiv=\"Refresh\" content=") != NULL) {
++ char *loc = stristr(buff,"<meta http-equiv=\"Refresh\" content=");
++ loc = loc + 35;
++ while ( (*loc != '\0') && (*loc != '"' ) ){
++ loc++;
++ }
+
+- *Buff = NULL;
++ if( *loc != '\0' ) loc++;
+
+- GTimer * time_since_start=g_timer_new();
+- g_timer_start(time_since_start);
++ while ( (*loc != '\0') && (*loc != '=' ) && (*loc != '"' ) ){
++ loc++;
++ }
+
+- memset(htmlBuff, 0, sizeof(htmlBuff));
++ if( *loc == '=' ) loc++;
+
+- memset((char*) &soc_in, 0, sizeof(soc_in));
+- soc_in.sin_family = AF_INET;
++ while (*loc == ' ' ){
++ loc++;
++ }
+
+- struct hostent * hostinfo;
++ int substring_len = 0;
+
+- hostinfo=url->hostinfo;
+- if ((hostinfo == NULL) || (hostinfo->h_length == 0)
+- || (hostinfo->h_addr == NULL)) {
+- // DNS error
+- g_free(time_since_start);
+- return NET_NETERROR;
+- }
+- soc_in.sin_addr=*(struct in_addr*)hostinfo->h_addr;
+- soc_in.sin_port = htons(url->port);
++ while ( loc[substring_len] != '\0' && (loc[substring_len]) != '"' ) {
++ substring_len++;
++ }
++
++ sprintf(url->fullurl,"%.*s\n",substring_len,loc);
++
++ return TRUE;
+
+- socketID = socket(AF_INET, SOCK_STREAM, 0);
+- if (socketID < 0) {
+- g_free(time_since_start);
+- return NET_NETERROR;
+ }
+
+- set_nonblock(socketID);
+- err = connect(socketID, (struct sockaddr *)&soc_in, sizeof (soc_in));
++ return FALSE;
++}
+
+- if (err < 0) {
+- if (errno == EINPROGRESS) {
+- FD_ZERO(&fds);
+- FD_SET(socketID,&fds);
+- tm.tv_sec = int(timeOut / 2);
+- tm.tv_usec=0;
+- err = select(socketID+1,NULL,&fds,NULL,&tm);
++int fetchURL(URL *url, int Post, char **Buff, char *cookiejar, int redir)
++{
++ size_t avebuffsize = 1 << 15; // 2 to the 15th power
++ CURL *curl_handle;
+
+- if (err == 0) {
+-#ifdef DEBUG_NETWORK
+- fprintf(stderr, "connection timed out\n");
+-#endif
+- CloseSocket(socketID);
+- g_free(time_since_start);
+- return NET_TIMEOUT;
+- }
+- }
+- else {
+-#ifdef DEBUG_NETWORK
+- fprintf(stderr,
+- "connection failed immediately: err=%d errno=%d (%s)\n",
+- err, errno, strerror( errno ) );
+-#endif
+- CloseSocket(socketID);
+- g_free(time_since_start);
+- return NET_NETERROR;
+- }
+- }
++ curl_memory_chunk.memory = (char *) malloc(avebuffsize*sizeof(char));
++ curl_memory_chunk.size = 0;
++ curl_memory_chunk.memsize = avebuffsize;
+
+- // did we connect, or not?
+- err = 0;
++ // Initialize the curl session
++ curl_handle = curl_easy_init();
+
+- {
+- socklen_t arglen = sizeof(int);
++ // Return both the headers and the body
++ curl_easy_setopt(curl_handle, CURLOPT_HEADER, 1);
++ curl_easy_setopt(curl_handle, CURLOPT_NOBODY, 0);
+
+- /* this is slightly grotty to avoid problems with SysV, where */
+- /* apparently getsockopt() will fail with an async error, rather */
+- /* that return it like it's supposed to. */
++ // Automatically handle all the cookies
++ curl_easy_setopt(curl_handle, CURLOPT_COOKIEJAR, cookiejar);
++ curl_easy_setopt(curl_handle, CURLOPT_COOKIEFILE, cookiejar);
+
+- if (getsockopt(socketID,SOL_SOCKET,SO_ERROR,(void *)&err,&arglen) < 0)
+- err = errno;
+- }
++ // Act like we are the mozilla browser
++ curl_easy_setopt(curl_handle, CURLOPT_USERAGENT,
++ "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040113");
+
+- if (err != 0) {
+-#ifdef DEBUG_NETWORK
+- fprintf(stderr,
+- "connection failed: err=%d errno=%d (%s)\n",
+- err, errno, strerror(errno));
+-#endif
+- CloseSocket(socketID);
+- g_free(time_since_start);
+- return NET_NETERROR;
++ // This next feature doesn't appear to work properly so we follow by hand
++ curl_easy_setopt(curl_handle, CURLOPT_FOLLOWLOCATION, 1);
++
++ // Someone should make the proxy feature work properly
++ // This is only a guess of how it might work
++ if( url->useproxy ) {
++ char proxyuserpwd[200];
++ sprintf(proxyuserpwd,"%s:%s",url->proxyuser,url->proxypass);
++ curl_easy_setopt(curl_handle,CURLOPT_PROXYUSERPWD,proxyuserpwd);
+ }
+
+- if (uname(&utsbuf) < 0)
+- UserAgent[0] = '\0';
+- else
+- sprintf(UserAgent, "User-Agent: Mozilla/5.0 (X11; U; %s %s; en-US; rv:1.5) Gecko/20031115 Firebird/0.7\r\n", utsbuf.sysname, utsbuf.machine);
++ // Set the URL we wish to fetch
++ curl_easy_setopt(curl_handle, CURLOPT_URL, url->fullurl);
+
+- // send our request for the web page.
+- // XXX: Took out the cookie for now cuz we're not yet using it.
++ // If we are posting, set the post fields
+ if (Post) {
+- int URLLen;
+- char *Args;
+-
+- URLLen = strcspn(url->url, "?");
+- Args = &url->url[URLLen];
+- if (*Args != '\0')
+- Args++;
+-
+- // do we need to authenticate through the proxy?
+- // check both user and password because I am not sure if it is possible
+- // that one or the other could be blank...we'll be safe
+- if(url->proxyuser == NULL && url->proxypass == NULL) {
+- // no.
+- sprintf(lineBuff, "POST %.*s HTTP/1.1\r\nHost: %s\r\n%sConnection: close\r\nContent-Type: application/x-www-form-urlencoded\r\nContent-Length: %i\r\n\r\n%s",
+- URLLen,
+- url->url,
+- url->hoststring,
+- UserAgent,
+- strlen(Args),
+- Args);
+- } else {
+- // yes, encode the proxy's username and password and insert it into the header
+- char *encstr = NULL;
+- basic_authentication_encode(url->proxyuser, url->proxypass, &encstr);
+- sprintf(lineBuff, "POST %.*s HTTP/1.1\r\nConnection: close\r\nHost: %s\r\nPragma: no-cache\r\nCache-control: no-cache\r\nProxy-Authorization: %s\r\nContent-Type: application/x-www-form-urlencoded\r\nContent-Length: %i\r\n%s\r\n%s",
+- URLLen,
+- url->url,
+- url->hoststring,
+- encstr,
+- strlen(Args),
+- UserAgent,
+- Args);
+- free(encstr);
+- }
+- } else if(url->proxyuser == NULL && url->proxypass == NULL) {
+- // do we need to authenticate through the proxy? no.
+- sprintf(lineBuff, "GET %s HTTP/1.1\r\nConnection: close\r\nHost: %s\r\nPragma: no-cache\r\nCache-control: no-cache\r\n%s\r\n",
+- url->url,
+- url->hoststring,
+- UserAgent);
+- } else {
+- // yes, encode the proxy's username and password and insert it into the header
+- char *encstr = NULL;
+- basic_authentication_encode(url->proxyuser, url->proxypass, &encstr);
+- sprintf(lineBuff, "GET %s HTTP/1.1\r\nConnection: close\r\nHost: %s\r\nPragma: no-cache\r\nCache-control: no-cache\r\nProxy-Authorization: %s\r\n%s\r\n",
+- url->url,
+- url->hoststring,
+- encstr,
+- UserAgent);
+- free(encstr);
++ curl_easy_setopt(curl_handle, CURLOPT_POSTFIELDS, url->fullurlpost);
+ }
+
+-#ifdef DEBUG_NETWORK
+- printf("HTTP request header:\r\n%s", lineBuff);
+-#endif
++ // All the data will be sent to this function by curl
++ curl_easy_setopt(curl_handle, CURLOPT_WRITEFUNCTION, WriteMemoryBuffer);
+
+- if (send(socketID, lineBuff, strlen(lineBuff), 0) < 0) {
++ // We will need our memory buffer to be passed to the write function
++ curl_easy_setopt(curl_handle, CURLOPT_FILE, (void *)&curl_memory_chunk);
+
+-#ifdef DEBUG_NETWORK
+- fprintf(stderr,
+- "could not send URL: errno=%d (%s)\n",
+- errno, strerror(errno));
+-#endif
+- CloseSocket(socketID);
+- g_free(time_since_start);
+- return NET_NETERROR;
+- }
++ // Now get that URL
++ if (CURLE_OK == curl_easy_perform(curl_handle)) {
+
+- buffLength = 0;
+-
+- int counter = 0;
+- bool goodcall = TRUE;
+- GTimer * time_since_pkt=g_timer_new();
+- g_timer_start(time_since_pkt);
+-
+- while (goodcall) {
+- if (cancelPressed) {
+- g_free(time_since_pkt);
+- g_free(time_since_start);
+- return NET_USERCANCEL;
+- }
+- if (counter == 5) {
+- counter = 0;
+- gtk_main_iteration_do(FALSE);
+- if (g_timer_elapsed(time_since_start,NULL) > timeOut) {
+- CloseSocket(socketID);
+- g_free(time_since_pkt);
+- g_free(time_since_start);
+- return NET_TIMEOUT;
+- }
+-
+- if (g_timer_elapsed(time_since_pkt,NULL) > timeOut/2) {
+- CloseSocket(socketID);
+- g_free(time_since_pkt);
+- g_free(time_since_start);
+- return NET_TIMEOUT;
+- }
+- }
+- recerr = recv(socketID, lineBuff, 256, 0);
+- if (recerr < 0) {
+- if (errno != EWOULDBLOCK) {
+- goodcall = FALSE;
+- break;
+- }
+- goodcall = TRUE;
+- gtk_main_iteration_do(FALSE);
+- usleep(10000);
+- counter++;
++ // If we haven't hit redirect limit, follow any redirects
++ if ( redir && checkredir(url,curl_memory_chunk.memory) ) {
++ /* cleanup curl stuff */
++ curl_easy_cleanup(curl_handle);
++ return fetchURL(url, 0, Buff, cookiejar, redir - 1);
+ }
+- else if (recerr == 0)
+- break;
+- else {
+- if ((buffLength + recerr) < HUGEBUFF) {
+- g_timer_reset(time_since_pkt);
+- counter = 0;
+- buffLength = buffLength + recerr;
+- lineBuff[recerr] = '\0';
+- strncat(htmlBuff, lineBuff, recerr);
+- goodcall = TRUE;
+- }
+- else break;
+- }
+- }
+- CloseSocket(socketID);
+
+- if (checkredir(url, htmlBuff)) {
+- g_free(time_since_pkt);
+- g_free(time_since_start);
+-
+- DPRINTF(DLOW, ("Redirection detected: %s%s\n", url->hoststring, url->url));
+-
+- /* Terminal point for recursion. */
+- if (redir > MAX_REDIR) {
+- DPRINTF(DLOW, ("Maximum redirection reached.\n"));
+- showError("Maximum redirection reached!");
+- return NET_MAXREDIR;
+- }
+- else
+- return fetchURL(url, 0, Buff, timeOut, redir+1);
++ // Set the buffer pointer to the curl memory buffer
++ *Buff= curl_memory_chunk.memory;
+
+- } else {
+- if (!goodcall) {
+- g_free(time_since_pkt);
+- g_free(time_since_start);
+- return NET_NETERROR;
+- }
+- /* make and copy buffer */
+- *Buff=(char *)malloc(buffLength+1);
+- strnzcpy(*Buff,htmlBuff,buffLength);
++ /* cleanup curl stuff */
++ curl_easy_cleanup(curl_handle);
+
+- g_free(time_since_start);
+- g_free(time_since_pkt);
+ return NET_SUCCESS;
++
++ } else {
++
++ /* cleanup curl stuff */
++ curl_easy_cleanup(curl_handle);
++
++ return NET_NETERROR;
+ }
++
+ }
+
+
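
The rewritten netstuff.cpp drops the raw-socket plumbing and lets the curl easy interface do the transfer, accumulating headers and body in a realloc-grown memory buffer through a write callback so that checkredir() can parse Location headers and meta refreshes out of the same buffer. The standalone sketch below shows that buffer technique in isolation; the URL is a placeholder, and CURLOPT_WRITEDATA is used here as the current name for the CURLOPT_FILE option the patch sets.

#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <curl/curl.h>

struct MemBuf {
  char  *data;   // heap buffer holding headers + body
  size_t len;    // bytes used so far
};

static size_t write_cb(void *ptr, size_t size, size_t nmemb, void *userp)
{
  size_t realsize = size * nmemb;
  MemBuf *mem = static_cast<MemBuf *>(userp);
  char *grown = static_cast<char *>(realloc(mem->data, mem->len + realsize + 1));
  if (grown == NULL)
    return 0;                        // returning short tells curl to abort the transfer
  mem->data = grown;
  memcpy(mem->data + mem->len, ptr, realsize);
  mem->len += realsize;
  mem->data[mem->len] = '\0';        // keep the buffer NUL-terminated for string parsing
  return realsize;
}

int main()
{
  MemBuf chunk = { static_cast<char *>(malloc(1)), 0 };

  curl_global_init(CURL_GLOBAL_DEFAULT);
  CURL *h = curl_easy_init();
  curl_easy_setopt(h, CURLOPT_URL, "http://example.com/");  // placeholder URL
  curl_easy_setopt(h, CURLOPT_HEADER, 1L);                  // headers + body, as in the patch
  curl_easy_setopt(h, CURLOPT_WRITEFUNCTION, write_cb);
  curl_easy_setopt(h, CURLOPT_WRITEDATA, &chunk);

  if (curl_easy_perform(h) == CURLE_OK)
    printf("fetched %zu bytes\n", chunk.len);

  curl_easy_cleanup(h);
  curl_global_cleanup();
  free(chunk.data);
  return 0;
}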