view SrvTxtUtils.py @ 2:ac8e119b25ec

trying to make import from xml work
author casties
date Tue, 26 Jul 2011 11:55:19 +0200
parents e4bae49e657b
children 7f0e2b656e5c
line wrap: on
line source

"""Utility methods for handling XML, reading HTTP, etc"""

import sys
import urllib
import urllib2
import logging


srvTxtUtilsVersion = "1.1"

def getInt(number, default=0):
    """returns always an int (0 in case of problems)

    number -- anything int() accepts (string, float, ...)
    default -- fallback value, itself coerced to int
    """
    try:
        return int(number)
    except (ValueError, TypeError):
        # only catch conversion failures; a bare except would also
        # swallow KeyboardInterrupt/SystemExit
        return int(default)

def getAt(array, idx, default=None):
    """returns element idx from array or default (in case of problems)

    array -- any indexable (list, tuple, dict, string)
    idx -- index or key to look up
    default -- returned when the lookup fails
    """
    try:
        return array[idx]
    except (IndexError, KeyError, TypeError):
        # catch only lookup failures (bad index, missing key,
        # non-indexable object) instead of a bare except
        return default

def getText(node):
    """returns all text content of a node and its subnodes

    node -- an ElementTree element (or None, which yields "")
    Concatenates node.text, each child's text (recursively) and
    each child's tail, in document order.
    """
    if node is None:
        return ""
    # ElementTree:
    text = node.text or ""
    for e in node:
        # recurse into child -- original called undefined 'gettext',
        # raising NameError for any node with child elements
        text += getText(e)
        if e.tail:
            text += e.tail

    return text



def getHttpData(url, data=None, num_tries=3, timeout=10):
    """returns result from url+data HTTP request"""
    # we do GET (by appending data to url)
    if isinstance(data, str) or isinstance(data, unicode):
        # if data is string then append
        url = "%s?%s"%(url,data)
    elif isinstance(data, dict) or isinstance(data, list) or isinstance(data, tuple):
        # urlencode
        url = "%s?%s"%(url,urllib.urlencode(data))
    
    response = None
    errmsg = None
    for cnt in range(num_tries):
        try:
            logging.debug("getHttpData(#%s %ss) url=%s"%(cnt+1,timeout,url))
            if sys.version_info < (2, 6):
                # set timeout on socket -- ugly :-(
                import socket
                socket.setdefaulttimeout(float(timeout))
                response = urllib2.urlopen(url)
            else:
                # timeout as parameter
                response = urllib2.urlopen(url,timeout=float(timeout))
            # check result?
            break
        except urllib2.HTTPError, e:
            logging.error("getHttpData: HTTP error(%s): %s"%(e.code,e))
            errmsg = str(e)
            # stop trying
            break
        except urllib2.URLError, e:
            logging.error("getHttpData: URLLIB error(%s): %s"%(e.reason,e))
            errmsg = str(e)
            # stop trying
            #break

    if response is not None:
        data = response.read()
        response.close()
        return data
    
    raise IOError("ERROR fetching HTTP data from %s: %s"%(url,errmsg))
    #return None