I had to install subversion on a couple of HP-UX boxes. Fortunately, there's an HP-UX software archive out there with precompiled versions of lots of software. Unfortunately, dependency resolution is like the bad old days of 1997: entirely manual. And there are fifteen or so dependencies for subversion.
So, I wrote a script to parse the dependencies and download the packages automatically. It requires Python -- which you can install from the archive with just the Python package and the Db package -- and BeautifulSoup, which you can google for. Usage is:
hpuxinstaller <archive package url> <package name>
[e.g., hpuxinstaller http://hpux.cs.utah.edu/hppd/hpux/Development/Tools/subversion-1.4.4/ subversion]
[wait for packages to download]
gunzip *.gz
[paste in conveniently given swinstall commands]
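For the subversion example above, the tail end of a run looks something like this; the download directory and exact depot versions are illustrative, and the script prints one swinstall line for each depot it fetched:
gunzip *.gz
swinstall -s /root/depots/subversion-1.4.4-ia64-11.23.depot subversion
[one swinstall line per downloaded dependency]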
Here is the script:
#!/usr/local/bin/python
import urlparse, urllib2, sys, os
from subprocess import Popen, PIPE
from BeautifulSoup import BeautifulSoup
required = {}
if not os.path.exists('cache'):
    os.mkdir('cache')
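# Fetch a URL, caching the response under cache/ so repeated runs don't re-download pages.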
def getcachedpage(url):
    fname = 'cache/' + url.replace('/', '-')
    try:
        page = file(fname).read()
    except IOError:
        print 'fetching ' + url
        page = urllib2.urlopen(url).read()
        file(fname, 'wb').write(page)
    return page
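# Parse an archive package page and yield (name, URL) pairs from its 'Run-time dependencies:' row.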
def dependencies(url):
    scheme, netloc, _, _, _, _ = urlparse.urlparse(url)
    soup = BeautifulSoup(getcachedpage(url))
    text = soup.find('td', text='Run-time dependencies:')
    if not text:
        return
    tr = text.parent.parent
    td = tr.findAll('td')[1]
    for a in td.findAll('a'):
        yield (a.contents[0], '%s://%s%s' % (scheme, netloc, a['href']))
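# Record a package in `required`, then recurse into its run-time dependencies, skipping any already seen.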
def add(name, url):
    required[name] = url
    for depname, depurl in dependencies(url):
        if depname in required:
            continue
        print "%s requires %s" % (name, depname)
        required[depname] = depurl
        add(depname, depurl)
        
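# Stream a depot file into the current directory, 64 KB at a time.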
def download(full_url):
    print 'downloading ' + full_url
    _, _, path, _, _, _ = urlparse.urlparse(full_url)
    fname = os.path.basename(path)
    f = file(fname, 'wb')
    def chunkify_to_eof(stream, chunksize=64*1024):
        while True:
            data = stream.read(chunksize)
            if not data:
                break
            yield data
    for chunk in chunkify_to_eof(urllib2.urlopen(full_url)):
        f.write(chunk)
    f.close()
# Compute dependencies before checking for installed files, since swinstall
# can let a package be installed w/o its dependencies. If there are such
# packages installed we don't want to skip their [missing] dependencies.
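# sys.argv[1] is the archive package URL, sys.argv[2] is the package name (see usage above).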
add(sys.argv[2], sys.argv[1])
try:
    p = Popen(['swlist'], stdout=PIPE)
except OSError:
    print 'Warning: unable to list installed packages'
    installed = set()
else:
    installed = set(line.strip().split()[0] for line in p.stdout if line.strip())
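# Download a depot for every required package that isn't already installed.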
to_install = []
for name, url in required.iteritems():
    if name in installed:
        print name + ' is already installed'
        continue
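    # The script assumes ia64 / HP-UX 11.23 depots, which live under /ftp/ rather than /hppd/.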
    full_url = '%s%s-ia64-11.23.depot.gz' % (url.replace('/hppd/', '/ftp/'), url.split('/')[-2])
    to_install.append(os.path.basename(full_url))
    download(full_url)
if to_install:
    print "\nAfter gunzip, run:"
    for fname in to_install:
        print "swinstall -s %s/%s %s" % (os.getcwd(), fname[:-3], fname.split('-')[0])
else:
    print 'Everything is already installed'
Comments
http://forums11.itrc.hp.com/service/forums/familyhome.do?familyId=117
Incidentally, swinstall is quite capable of checking for dependencies during install; it's just that those who build the packages for the software porting & archive centre don't bother with this. Anything that comes direct from HP generally does check dependencies...
Download From:
http://hpux.connect.org.uk/hppd/hpux/Sysadmin/depothelper-2.00/
Usage:
Install: # depothelper wget
Uninstall: # depothelper -u wget
List dependencies only: # depothelper -l wget
Thanks!
Shirish Shukla
Learn & Implement !!