from sfurls import *
from sfregex import *
from constants import COOKIE_FILENAME,SF_FTP_SERVER,SF_FTP_DIRECTORY
from ProjectVO import ProjectVO
from PackageVO import PackageVO
from urllib import urlencode,quote_plus
try:
# python2.4 standard
from urllib2 import build_opener,HTTPCookieProcessor
except:
# python2.3 compatibility
from python24.urllib2 import build_opener,HTTPCookieProcessor
import logging
from messageQueue import mainMessageQueue,progressMessageQueue
import os, sys
import ftplib
import time
# Module-level shortcuts so call sites can write debug(...)/error(...) directly.
debug = logging.getLogger("sfcomm").debug
error = logging.getLogger("sfcomm").error
# disable debug level logging in the cookielib module
logging.getLogger("cookielib").setLevel(logging.WARN)
try:
import cookielib
except ImportError:
try:
import python24.cookielib as cookielib
except:
print "Missing cookielib (requires python 2.4)"
sys.exit(1)
# Python 2.3 compatibility: the set builtin first appeared in 2.4,
# so fall back to sets.Set when it is missing.
try:
    set = set
except:
    from sets import Set
    set = Set
# Maximum number of POST-body bytes echoed into the debug log by fetch_url.
TRUNCATE_POST = 1024
class SFComm:
def __init__(self, data_path):
self.cookie_file = os.path.join(data_path, COOKIE_FILENAME)
def remove_cookie(self):
try:
os.remove(self.cookie_file)
except:
pass
def fetch_url(self, url, post_encoded=None, queue=progressMessageQueue, addl_headers=None):
data = ""
try:
debug("Fetching: %s", url)
if post_encoded:
if len(post_encoded) > TRUNCATE_POST: truncated = "..."
else: truncated = ""
debug("posted: %s%s", post_encoded[:TRUNCATE_POST], truncated)
queue.put("Fetching: %s" % url)
cj = cookielib.MozillaCookieJar()
cj.load(self.cookie_file, True)
opener = build_opener(HTTPCookieProcessor(cj))
if addl_headers:
opener.addheaders = addl_headers
r = opener.open(url, post_encoded)
# cj.save(self.cookie_file, True)
data = r.read()
# import time
# time.sleep(10000)
debug("Done")
except Exception, e:
print e
return data
def login(self, username, password, return_to=None):
# returns a set of projects for the given username/password
# or None
debug("Attempting to login to SourceForge")
form = {'form_loginname': username,
'form_pw': password,
'stay_in_ssl': '1',
'persistent_login': '1',
'login': 'Login With SSL'}
url = LOGIN_URL
if return_to: url += "?return_to=%s" % urlencode(return_to)
cj = cookielib.MozillaCookieJar()
opener = build_opener(HTTPCookieProcessor(cj))
try:
r = opener.open(url, urlencode(form))
except Exception, e:
error(e)
return set()
cj.save(self.cookie_file, True, True) # ignore discard! ignore expires
data = r.read()
# need to validate success or failure!!!!
#debug("data: %s", data)
projects = set()
project_tuples = PROJECTS_FROM_LOGIN.findall(data)
for pt in project_tuples:
projects.add(ProjectVO(pt[2], pt[1], pt[0]))
debug("login - projects fetched: %d" % len(projects))
#print "projects:", projects
#print "data:", data
return projects
def get_packages(self, group_id):
url = "%s=%s" % (EDIT_PACKAGE_BASE_URL, group_id)
packages = set()
data = self.fetch_url(url, queue=mainMessageQueue)
#debug("data: %s", data)
fullaccess = "True"
package_tuples = PACKAGES_FULL_ACCESS.findall(data)
if not package_tuples:
fullaccess = "False"
package_tuples = PACKAGES_LIMITED_ACCESS.findall(data)
for package_tuple in package_tuples:
packages.add(PackageVO(package_tuple[1],
package_tuple[0],
package_tuple[2],
fullaccess))
debug("Found %d packages for group_id %s", len(packages), group_id)
return packages
def add_package(self, group_id, package_name):
url = ADD_PACKAGE_URL
form = {'group_id': group_id,
'func': "add_package",
"package_name": package_name,
'submit': "Create This Package"}
data = self.fetch_url(url, urlencode(form), mainMessageQueue)
if data.find("Added Package") != -1: return 1
else: return 0
def update_package(self, group_id, package_id, package_name, package_status):
url = UPDATE_PACKAGE_URL
if package_status.lower() == 'active': status_id = '1'
else: status_id = '3'
form = {'group_id': group_id,
'package_id': package_id,
'func': 'update_package',
'package_name': package_name,
'status_id': status_id,
'submit': 'Update'}
data = self.fetch_url(url, urlencode(form), mainMessageQueue)
if data.find("Updated Package") != -1: return 1
else: return 0
def add_release(self, group_id, package_id, name):
url = "%s?package_id=%s&group_id=%s" % (NEW_RELEASE_URL,
package_id,
group_id)
form = {'package_id': package_id,
'group_id': group_id,
'release_name': name,
'newrelease': 'yes',
'submit': 'Create This Release'}
data = self.fetch_url(url, urlencode(form))
#print url
m = GET_RELEASE_ID.search(data)
if m:
return m.group("releaseid")
else:
if data.find("Error creating Project object") != -1:
progressMessageQueue.put("Error creating Project object")
elif data.find("Login to SourceForge.net") != -1:
progressMessageQueue.put("Login required?")
#print data
else:
progressMessageQueue.put("Error creating new release")
return None
def edit_release_step1(self, group_id, package_id, release_id,
name, release_notes, change_log, status='active'):
url = EDIT_RELEASE_URL
if status.lower() == 'active': status_id = '1'
else: status_id = '3'
form = {'package_id': package_id,
'new_package_id': package_id,
'group_id': group_id,
'release_id': release_id,
'release_name': name,
'release_date': time.strftime("%Y-%m-%d"),
'step1': '1',
'status_id': status_id,
'uploaded_notes': '',
'uploaded_changes': '',
'release_notes': release_notes,
'release_changes': change_log,
'preformatted': '1',
'submit': 'Submit/Refresh'
}
data = self.fetch_url(url,
urlencode(form))
if data.find("Data Saved") != -1: return True
else: return False
def edit_release_step2(self, group_id, package_id, release_id, fileVOs):
url = EDIT_RELEASE_URL
form = {'package_id': package_id,
'group_id': group_id,
'release_id': release_id,
'step2': '1',
'submit': 'Add Files and/or Refresh View'}
enc = urlencode(form)
for fileVO in fileVOs:
filename = fileVO.getFilename()
enc += "&file_list%s=%s" % (quote_plus("[]"), quote_plus(filename))
data = self.fetch_url(url, enc)
if data.find("File(s) Added") != -1: ok = True
else:
print data
print "------------------------------------------------------"
ok = False
matches = GET_FILE_ID.findall(data)
lookup = {}
for match in matches:
# map each filename to fileid
lookup[match[1]] = match[0]
return lookup, ok
def edit_release_step3(self, group_id, package_id, release_id, fileVOs, lookup_dict):
url = EDIT_RELEASE_URL
form = {'package_id': package_id,
'group_id': group_id,
'release_id': release_id,
'new_release_id': release_id,
'step3': '1',
'release_time': time.strftime("%Y-%m-%d"),
'submit': 'Update/Refresh'}
ok = True
for fileVO in fileVOs:
form['processor_id'] = fileVO.getProcessorId()
form['type_id'] = fileVO.getFileTypeId()
form['file_id'] = lookup_dict[fileVO.getFilename()]
data = self.fetch_url(url, urlencode(form))
if data.find("File Updated") != -1: ok &= True
else: ok &= False
#print data
#print "\n\n\n\n\n---------------------------------\n"
return ok
def edit_release_step4(self, group_id, package_id, release_id):
url = EDIT_RELEASE_URL
form = {'package_id': package_id,
'group_id': group_id,
'release_id': release_id,
'step4': 'Email Release',
'sure': '1',
'submit': 'Send Notice'}
data = self.fetch_url(url, urlencode(form))
# notification status?!?!
return data
def upload_files(self, parent, fileVOs):
ftp = ftplib.FTP()
try:
ftp.connect(SF_FTP_SERVER)
ftp.getwelcome()
ftp.login("ftp", "releaseforge")
ftp.cwd(SF_FTP_DIRECTORY)
num_uploaded = 0
for fileVO in fileVOs:
fullpath = fileVO.getFullPath()
filename = fileVO.getFilename()
progressMessageQueue.put("Uploading: %s" % filename)
f = open(fullpath, "rb")
ftp.storbinary('STOR ' + filename, f)
f.close()
num_uploaded += 1
if parent.isCancelled(): break
if num_uploaded < len(fileVOs):
msg = "File upload operation cancelled"
else:
msg = "Successfully uploaded %d files" % num_uploaded
except Exception, e:
msg = str(e)
progressMessageQueue.put("FTP error: %s" % msg)
debug(msg)
try:
ftp.close()
except: pass
return msg
if __name__ == '__main__':
    # Library module: no standalone behavior when executed directly.
    pass
|