summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJohn Bailey <rekkanoryo@rekkanoryo.org>2009-06-19 20:34:02 +0000
committerJohn Bailey <rekkanoryo@rekkanoryo.org>2009-06-19 20:34:02 +0000
commitcbc3e2d580b46b2a94565888a2f4061531386af1 (patch)
tree6d53480589a9fca4bfa95c60c55a49470fc22833
parent4f8cd762499fac7244643b7469386dca3c5b982a (diff)
downloadpidgin-cbc3e2d580b46b2a94565888a2f4061531386af1.tar.gz
Ugly hack to get around needing a minor bump to make this authentication code
work with some proxies. I yanked the relevant code from 2.6.0devel's libpurple/util.c and made it local to the yahoo prpl.
-rw-r--r--libpurple/protocols/yahoo/util.c546
-rw-r--r--libpurple/protocols/yahoo/yahoo.h5
2 files changed, 551 insertions, 0 deletions
diff --git a/libpurple/protocols/yahoo/util.c b/libpurple/protocols/yahoo/util.c
index bec74be2ab..2dd7c997b3 100644
--- a/libpurple/protocols/yahoo/util.c
+++ b/libpurple/protocols/yahoo/util.c
@@ -24,14 +24,560 @@
#include "config.h"
#endif
+#include "cipher.h"
#include "debug.h"
#include "internal.h"
#include "prpl.h"
+#include "util.h"
#include "yahoo.h"
#include <string.h>
/* State for one in-flight URL fetch.  This mirrors the private
 * PurpleUtilFetchUrlData from 2.6.0devel's libpurple/util.c; it is
 * duplicated here so the yahoo prpl can use the newer HTTP code on
 * 2.5.x without a minor version bump. */
struct _PurpleUtilFetchUrlData
{
	PurpleUtilFetchUrlCallback callback;  /* invoked exactly once, with data or an error */
	void *user_data;                      /* opaque pointer handed back to callback */

	/* Parsed-out pieces of the URL, owned by this struct
	 * (filled in by purple_url_parse()). */
	struct
	{
		char *user;
		char *passwd;
		char *address;   /* host name */
		int port;
		char *page;      /* path portion of the URL */

	} website;

	char *url;                 /* full URL being fetched; replaced on redirect */
	int num_times_redirected;  /* redirect-loop guard; fetch aborts at 5 */
	gboolean full;             /* TRUE: put the whole URL on the request line */
	char *user_agent;          /* optional User-Agent header value, or NULL */
	gboolean http11;           /* TRUE: request HTTP/1.1, else HTTP/1.0 */
	char *request;             /* raw request text; built lazily when NULL */
	gsize request_written;     /* bytes of request already written to the socket */
	gboolean include_headers;  /* TRUE: hand response headers to the callback too */

	gboolean is_ssl;           /* https?  selects ssl_connection over fd below */
	PurpleSslConnection *ssl_connection;
	PurpleProxyConnectData *connect_data;
	int fd;                    /* cleartext socket, -1 when unused */
	guint inpa;                /* input-watch handle, 0 when none installed */

	gboolean got_headers;           /* end of response headers ("\r\n\r\n") seen */
	gboolean has_explicit_data_len; /* a Content-Length header was present */
	char *webdata;                  /* response buffer (body only, unless include_headers) */
	unsigned long len;              /* bytes of webdata in use */
	unsigned long data_len;         /* bytes of webdata allocated */
	gssize max_len;                 /* abort if response exceeds this; -1 = unlimited */
};
+
/**
 * Report a fatal fetch error.  The arguments to this function are
 * similar to printf: the message is formatted, passed to the user
 * callback (with NULL data and length 0), and then the gfud is
 * destroyed via purple_util_fetch_url_cancel().
 *
 * The gfud is invalid after this returns, so every caller must
 * return immediately without touching it again.
 */
static void
purple_util_fetch_url_error(PurpleUtilFetchUrlData *gfud, const char *format, ...)
{
	gchar *error_message;
	va_list args;

	va_start(args, format);
	error_message = g_strdup_vprintf(format, args);
	va_end(args);

	gfud->callback(gfud, gfud->user_data, NULL, 0, error_message);
	g_free(error_message);
	purple_util_fetch_url_cancel(gfud);
}
+static void url_fetch_connect_cb(gpointer url_data, gint source, const gchar *error_message);
+static void ssl_url_fetch_connect_cb(gpointer data, PurpleSslConnection *ssl_connection, PurpleInputCondition cond);
+static void ssl_url_fetch_error_cb(PurpleSslConnection *ssl_connection, PurpleSslErrorType error, gpointer data);
+
+static gboolean
+parse_redirect(const char *data, size_t data_len,
+ PurpleUtilFetchUrlData *gfud)
+{
+ gchar *s;
+ gchar *new_url, *temp_url, *end;
+ gboolean full;
+ int len;
+
+ if ((s = g_strstr_len(data, data_len, "\nLocation: ")) == NULL)
+ /* We're not being redirected */
+ return FALSE;
+
+ s += strlen("Location: ");
+ end = strchr(s, '\r');
+
+ /* Just in case :) */
+ if (end == NULL)
+ end = strchr(s, '\n');
+
+ if (end == NULL)
+ return FALSE;
+
+ len = end - s;
+
+ new_url = g_malloc(len + 1);
+ strncpy(new_url, s, len);
+ new_url[len] = '\0';
+
+ full = gfud->full;
+
+ if (*new_url == '/' || g_strstr_len(new_url, len, "://") == NULL)
+ {
+ temp_url = new_url;
+
+ new_url = g_strdup_printf("%s:%d%s", gfud->website.address,
+ gfud->website.port, temp_url);
+
+ g_free(temp_url);
+
+ full = FALSE;
+ }
+
+ purple_debug_info("util", "Redirecting to %s\n", new_url);
+
+ gfud->num_times_redirected++;
+ if (gfud->num_times_redirected >= 5)
+ {
+ purple_util_fetch_url_error(gfud,
+ _("Could not open %s: Redirected too many times"),
+ gfud->url);
+ return TRUE;
+ }
+
+ /*
+ * Try again, with this new location. This code is somewhat
+ * ugly, but we need to reuse the gfud because whoever called
+ * us is holding a reference to it.
+ */
+ g_free(gfud->url);
+ gfud->url = new_url;
+ gfud->full = full;
+ g_free(gfud->request);
+ gfud->request = NULL;
+
+ if (gfud->is_ssl) {
+ gfud->is_ssl = FALSE;
+ purple_ssl_close(gfud->ssl_connection);
+ gfud->ssl_connection = NULL;
+ } else {
+ purple_input_remove(gfud->inpa);
+ gfud->inpa = 0;
+ close(gfud->fd);
+ gfud->fd = -1;
+ }
+ gfud->request_written = 0;
+ gfud->len = 0;
+ gfud->data_len = 0;
+
+ g_free(gfud->website.user);
+ g_free(gfud->website.passwd);
+ g_free(gfud->website.address);
+ g_free(gfud->website.page);
+ purple_url_parse(new_url, &gfud->website.address, &gfud->website.port,
+ &gfud->website.page, &gfud->website.user, &gfud->website.passwd);
+
+ if (purple_strcasestr(new_url, "https://") != NULL) {
+ gfud->is_ssl = TRUE;
+ gfud->ssl_connection = purple_ssl_connect(NULL,
+ gfud->website.address, gfud->website.port,
+ ssl_url_fetch_connect_cb, ssl_url_fetch_error_cb, gfud);
+ } else {
+ gfud->connect_data = purple_proxy_connect(NULL, NULL,
+ gfud->website.address, gfud->website.port,
+ url_fetch_connect_cb, gfud);
+ }
+
+ if (gfud->ssl_connection == NULL && gfud->connect_data == NULL)
+ {
+ purple_util_fetch_url_error(gfud, _("Unable to connect to %s"),
+ gfud->website.address);
+ }
+
+ return TRUE;
+}
+
+static size_t
+parse_content_len(const char *data, size_t data_len)
+{
+ size_t content_len = 0;
+ const char *p = NULL;
+
+ /* This is still technically wrong, since headers are case-insensitive
+ * [RFC 2616, section 4.2], though this ought to catch the normal case.
+ * Note: data is _not_ nul-terminated.
+ */
+ if(data_len > 16) {
+ p = (strncmp(data, "Content-Length: ", 16) == 0) ? data : NULL;
+ if(!p)
+ p = (strncmp(data, "CONTENT-LENGTH: ", 16) == 0)
+ ? data : NULL;
+ if(!p) {
+ p = g_strstr_len(data, data_len, "\nContent-Length: ");
+ if (p)
+ p++;
+ }
+ if(!p) {
+ p = g_strstr_len(data, data_len, "\nCONTENT-LENGTH: ");
+ if (p)
+ p++;
+ }
+
+ if(p)
+ p += 16;
+ }
+
+ /* If we can find a Content-Length header at all, try to sscanf it.
+ * Response headers should end with at least \r\n, so sscanf is safe,
+ * if we make sure that there is indeed a \n in our header.
+ */
+ if (p && g_strstr_len(p, data_len - (p - data), "\n")) {
+ sscanf(p, "%" G_GSIZE_FORMAT, &content_len);
+ purple_debug_misc("util", "parsed %" G_GSIZE_FORMAT "\n", content_len);
+ }
+
+ return content_len;
+}
+
+
+static void
+url_fetch_recv_cb(gpointer url_data, gint source, PurpleInputCondition cond)
+{
+ PurpleUtilFetchUrlData *gfud = url_data;
+ int len;
+ char buf[4096];
+ char *data_cursor;
+ gboolean got_eof = FALSE;
+
+ /*
+ * Read data in a loop until we can't read any more! This is a
+ * little confusing because we read using a different function
+ * depending on whether the socket is ssl or cleartext.
+ */
+ while ((gfud->is_ssl && ((len = purple_ssl_read(gfud->ssl_connection, buf, sizeof(buf))) > 0)) ||
+ (!gfud->is_ssl && (len = read(source, buf, sizeof(buf))) > 0))
+ {
+ if(gfud->max_len != -1 && (gfud->len + len) > gfud->max_len) {
+ purple_util_fetch_url_error(gfud, _("Error reading from %s: response too long (%d bytes limit)"),
+ gfud->website.address, gfud->max_len);
+ return;
+ }
+
+ /* If we've filled up our buffer, make it bigger */
+ if((gfud->len + len) >= gfud->data_len) {
+ while((gfud->len + len) >= gfud->data_len)
+ gfud->data_len += sizeof(buf);
+
+ gfud->webdata = g_realloc(gfud->webdata, gfud->data_len);
+ }
+
+ data_cursor = gfud->webdata + gfud->len;
+
+ gfud->len += len;
+
+ memcpy(data_cursor, buf, len);
+
+ gfud->webdata[gfud->len] = '\0';
+
+ if(!gfud->got_headers) {
+ char *tmp;
+
+ /* See if we've reached the end of the headers yet */
+ if((tmp = strstr(gfud->webdata, "\r\n\r\n"))) {
+ char * new_data;
+ guint header_len = (tmp + 4 - gfud->webdata);
+ size_t content_len;
+
+ purple_debug_misc("util", "Response headers: '%.*s'\n",
+ header_len, gfud->webdata);
+
+ /* See if we can find a redirect. */
+ if(parse_redirect(gfud->webdata, header_len, gfud))
+ return;
+
+ gfud->got_headers = TRUE;
+
+ /* No redirect. See if we can find a content length. */
+ content_len = parse_content_len(gfud->webdata, header_len);
+
+ if(content_len == 0) {
+ /* We'll stick with an initial 8192 */
+ content_len = 8192;
+ } else {
+ gfud->has_explicit_data_len = TRUE;
+ }
+
+
+ /* If we're returning the headers too, we don't need to clean them out */
+ if(gfud->include_headers) {
+ gfud->data_len = content_len + header_len;
+ gfud->webdata = g_realloc(gfud->webdata, gfud->data_len);
+ } else {
+ size_t body_len = 0;
+
+ if(gfud->len > (header_len + 1))
+ body_len = (gfud->len - header_len);
+
+ content_len = MAX(content_len, body_len);
+
+ new_data = g_try_malloc(content_len);
+ if(new_data == NULL) {
+ purple_debug_error("util",
+ "Failed to allocate %" G_GSIZE_FORMAT " bytes: %s\n",
+ content_len, g_strerror(errno));
+ purple_util_fetch_url_error(gfud,
+ _("Unable to allocate enough memory to hold "
+ "the contents from %s. The web server may "
+ "be trying something malicious."),
+ gfud->website.address);
+
+ return;
+ }
+
+ /* We may have read part of the body when reading the headers, don't lose it */
+ if(body_len > 0) {
+ tmp += 4;
+ memcpy(new_data, tmp, body_len);
+ }
+
+ /* Out with the old... */
+ g_free(gfud->webdata);
+
+ /* In with the new. */
+ gfud->len = body_len;
+ gfud->data_len = content_len;
+ gfud->webdata = new_data;
+ }
+ }
+ }
+
+ if(gfud->has_explicit_data_len && gfud->len >= gfud->data_len) {
+ got_eof = TRUE;
+ break;
+ }
+ }
+
+ if(len < 0) {
+ if(errno == EAGAIN) {
+ return;
+ } else {
+ purple_util_fetch_url_error(gfud, _("Error reading from %s: %s"),
+ gfud->website.address, g_strerror(errno));
+ return;
+ }
+ }
+
+ if((len == 0) || got_eof) {
+ gfud->webdata = g_realloc(gfud->webdata, gfud->len + 1);
+ gfud->webdata[gfud->len] = '\0';
+
+ gfud->callback(gfud, gfud->user_data, gfud->webdata, gfud->len, NULL);
+ purple_util_fetch_url_cancel(gfud);
+ }
+}
+
/* Adapter: libpurple's ssl layer uses its own input-callback signature,
 * so forward to url_fetch_recv_cb().  The fd argument is unused on the
 * ssl path (reads go through purple_ssl_read()), hence the -1. */
static void ssl_url_fetch_recv_cb(gpointer data, PurpleSslConnection *ssl_connection, PurpleInputCondition cond)
{
	url_fetch_recv_cb(data, -1, cond);
}
+
/*
 * This function is called when the socket is available to be written
 * to.
 *
 * Builds the GET request lazily (unless the caller supplied one), then
 * writes as much of it as the socket will take, tracking progress in
 * gfud->request_written so partial writes resume correctly.  Once the
 * whole request is out, the watch is switched from write to read.
 *
 * @param source The file descriptor that can be written to. This can
 *        be an http connection or it can be the SSL connection of an
 *        https request. So be careful what you use it for! If it's
 *        an https request then use purple_ssl_write() instead of
 *        writing to it directly.
 */
static void
url_fetch_send_cb(gpointer data, gint source, PurpleInputCondition cond)
{
	PurpleUtilFetchUrlData *gfud;
	/* NOTE(review): len/total_len are int while request_written is
	 * gsize; the comparisons below mix signednesses — confirm request
	 * sizes stay well under INT_MAX (they do for generated GETs). */
	int len, total_len;

	gfud = data;

	if (gfud->request == NULL)
	{
		/* Host header is not forbidden in HTTP/1.0 requests, and HTTP/1.1
		 * clients must know how to handle the "chunked" transfer encoding.
		 * Purple doesn't know how to handle "chunked", so should always send
		 * the Host header regardless, to get around some observed problems
		 */
		if (gfud->user_agent) {
			gfud->request = g_strdup_printf(
				"GET %s%s HTTP/%s\r\n"
				"Connection: close\r\n"
				"User-Agent: %s\r\n"
				"Accept: */*\r\n"
				"Host: %s\r\n\r\n",
				(gfud->full ? "" : "/"),
				(gfud->full ? (gfud->url ? gfud->url : "") : (gfud->website.page ? gfud->website.page : "")),
				(gfud->http11 ? "1.1" : "1.0"),
				(gfud->user_agent ? gfud->user_agent : ""),
				(gfud->website.address ? gfud->website.address : ""));
		} else {
			/* Same request, minus the User-Agent header. */
			gfud->request = g_strdup_printf(
				"GET %s%s HTTP/%s\r\n"
				"Connection: close\r\n"
				"Accept: */*\r\n"
				"Host: %s\r\n\r\n",
				(gfud->full ? "" : "/"),
				(gfud->full ? (gfud->url ? gfud->url : "") : (gfud->website.page ? gfud->website.page : "")),
				(gfud->http11 ? "1.1" : "1.0"),
				(gfud->website.address ? gfud->website.address : ""));
		}
	}

	/* Requests can contain credentials, so only log them when the user
	 * has explicitly opted in to unsafe debugging. */
	if(g_getenv("PURPLE_UNSAFE_DEBUG"))
		purple_debug_misc("util", "Request: '%s'\n", gfud->request);
	else
		purple_debug_misc("util", "request constructed\n");

	total_len = strlen(gfud->request);

	if (gfud->is_ssl)
		len = purple_ssl_write(gfud->ssl_connection, gfud->request + gfud->request_written,
				total_len - gfud->request_written);
	else
		len = write(gfud->fd, gfud->request + gfud->request_written,
				total_len - gfud->request_written);

	if (len < 0 && errno == EAGAIN)
		/* Socket not actually writable; try again on the next event. */
		return;
	else if (len < 0) {
		purple_util_fetch_url_error(gfud, _("Error writing to %s: %s"),
				gfud->website.address, g_strerror(errno));
		return;
	}
	gfud->request_written += len;

	if (gfud->request_written < total_len)
		/* Partial write; keep the write watch and resume later. */
		return;

	/* We're done writing our request, now start reading the response */
	if (gfud->is_ssl) {
		purple_input_remove(gfud->inpa);
		gfud->inpa = 0;
		purple_ssl_input_add(gfud->ssl_connection, ssl_url_fetch_recv_cb, gfud);
	} else {
		purple_input_remove(gfud->inpa);
		gfud->inpa = purple_input_add(gfud->fd, PURPLE_INPUT_READ, url_fetch_recv_cb,
			gfud);
	}
}
+
+static void
+url_fetch_connect_cb(gpointer url_data, gint source, const gchar *error_message)
+{
+ PurpleUtilFetchUrlData *gfud;
+
+ gfud = url_data;
+ gfud->connect_data = NULL;
+
+ if (source == -1)
+ {
+ purple_util_fetch_url_error(gfud, _("Unable to connect to %s: %s"),
+ (gfud->website.address ? gfud->website.address : ""), error_message);
+ return;
+ }
+
+ gfud->fd = source;
+
+ gfud->inpa = purple_input_add(source, PURPLE_INPUT_WRITE,
+ url_fetch_send_cb, gfud);
+ url_fetch_send_cb(gfud, source, PURPLE_INPUT_WRITE);
+}
+
/* SSL handshake finished: watch the underlying fd for writability and
 * push the request out immediately.  The watch is on ssl_connection->fd,
 * but all actual I/O in url_fetch_send_cb() goes through the
 * purple_ssl_* wrappers when gfud->is_ssl is set. */
static void ssl_url_fetch_connect_cb(gpointer data, PurpleSslConnection *ssl_connection, PurpleInputCondition cond)
{
	PurpleUtilFetchUrlData *gfud;

	gfud = data;

	gfud->inpa = purple_input_add(ssl_connection->fd, PURPLE_INPUT_WRITE,
			url_fetch_send_cb, gfud);
	url_fetch_send_cb(gfud, ssl_connection->fd, PURPLE_INPUT_WRITE);
}
+
/* SSL connection failed.  libpurple's ssl layer destroys the
 * PurpleSslConnection itself after this callback, so drop our pointer
 * first (otherwise the cancel inside the error helper would close it a
 * second time), then report the error — which also frees the gfud. */
static void ssl_url_fetch_error_cb(PurpleSslConnection *ssl_connection, PurpleSslErrorType error, gpointer data)
{
	PurpleUtilFetchUrlData *gfud;

	gfud = data;
	gfud->ssl_connection = NULL;

	purple_util_fetch_url_error(gfud, _("Unable to connect to %s: %s"),
			(gfud->website.address ? gfud->website.address : ""),
			purple_ssl_strerror(error));
}
+
/**
 * Fetch a URL (http or https), honoring the account's proxy settings.
 * This is a local copy of 2.6.0devel's
 * purple_util_fetch_url_request_len_with_account(), kept in the yahoo
 * prpl so 2.5.x can use it without a minor version bump.
 *
 * @param account         account whose proxy settings to use (may be NULL)
 * @param url             the URL to fetch
 * @param full            TRUE to put the whole URL on the request line
 * @param user_agent      optional User-Agent header value, or NULL
 * @param http11          TRUE to send an HTTP/1.1 request
 * @param request         a complete pre-built request to send instead of a
 *                        generated GET, or NULL
 * @param include_headers TRUE to pass the response headers to the callback
 *                        along with the body
 * @param max_len         maximum response size in bytes, or -1 for no limit
 * @param callback        called exactly once, with the data or an error
 * @param user_data       opaque data for the callback
 *
 * @return the new fetch handle, or NULL on immediate failure.
 *         NOTE(review): on the NULL paths below the callback has already
 *         been invoked (synchronously) with an error and the handle freed.
 */
PurpleUtilFetchUrlData *
purple_util_fetch_url_request_len_with_account(PurpleAccount *account,
		const char *url, gboolean full, const char *user_agent, gboolean http11,
		const char *request, gboolean include_headers, gssize max_len,
		PurpleUtilFetchUrlCallback callback, void *user_data)
{
	PurpleUtilFetchUrlData *gfud;

	g_return_val_if_fail(url != NULL, NULL);
	g_return_val_if_fail(callback != NULL, NULL);

	/* URLs can embed credentials; only log them under unsafe debugging. */
	if(g_getenv("PURPLE_UNSAFE_DEBUG"))
		purple_debug_info("util",
			"requested to fetch (%s), full=%d, user_agent=(%s), http11=%d\n",
			url, full, user_agent?user_agent:"(null)", http11);
	else
		purple_debug_info("util", "requesting to fetch a URL\n");

	gfud = g_new0(PurpleUtilFetchUrlData, 1);

	gfud->callback = callback;
	gfud->user_data = user_data;
	gfud->url = g_strdup(url);
	gfud->user_agent = g_strdup(user_agent);
	gfud->http11 = http11;
	gfud->full = full;
	gfud->request = g_strdup(request);
	gfud->include_headers = include_headers;
	gfud->fd = -1;
	gfud->max_len = max_len;

	purple_url_parse(url, &gfud->website.address, &gfud->website.port,
		&gfud->website.page, &gfud->website.user, &gfud->website.passwd);

	if (purple_strcasestr(url, "https://") != NULL) {
		if (!purple_ssl_is_supported()) {
			/* Frees gfud; see function doc about the NULL return. */
			purple_util_fetch_url_error(gfud,
					_("Unable to connect to %s: Server requires TLS/SSL, but no TLS/SSL support was found."),
					gfud->website.address);
			return NULL;
		}

		gfud->is_ssl = TRUE;
		gfud->ssl_connection = purple_ssl_connect(account,
				gfud->website.address, gfud->website.port,
				ssl_url_fetch_connect_cb, ssl_url_fetch_error_cb, gfud);
	} else {
		gfud->connect_data = purple_proxy_connect(NULL, account,
				gfud->website.address, gfud->website.port,
				url_fetch_connect_cb, gfud);
	}

	if (gfud->ssl_connection == NULL && gfud->connect_data == NULL)
	{
		/* Frees gfud; see function doc about the NULL return. */
		purple_util_fetch_url_error(gfud, _("Unable to connect to %s"),
				gfud->website.address);
		return NULL;
	}

	return gfud;
}
+
gboolean
yahoo_account_use_http_proxy(PurpleConnection *conn)
{
diff --git a/libpurple/protocols/yahoo/yahoo.h b/libpurple/protocols/yahoo/yahoo.h
index 29419f9cd0..aba6f15043 100644
--- a/libpurple/protocols/yahoo/yahoo.h
+++ b/libpurple/protocols/yahoo/yahoo.h
@@ -271,4 +271,9 @@ gchar* yahoo_get_cookies(PurpleConnection *gc);
gboolean yahoo_send_attention(PurpleConnection *gc, const char *username, guint type);
GList *yahoo_attention_types(PurpleAccount *account);
+/* This is a hack for 2.5.7 to get the y16 login to work properly with http proxies */
+PurpleUtilFetchUrlData * purple_util_fetch_url_request_len_with_account(PurpleAccount *account,
+ const char *url, gboolean full, const char *user_agent, gboolean http11,
+ const char *request, gboolean include_headers, gssize max_len,
+ PurpleUtilFetchUrlCallback callback, void *user_data);
#endif /* _YAHOO_H_ */