1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
|
# vim: set sw=4 sts=4 et :
# Copyright: 2008 Gentoo Foundation
# Author(s): Nirbheek Chauhan <nirbheek.chauhan@gmail.com>
# License: GPL-3
#
# Immortal lh!
#
import os, shutil, urllib2, atexit
import os.path as osp
import cPickle as pickle
from autotua import fetch, config, sync, chroot, jobuild
class Jobs:
    """Interface to jobs on the master server that we can do"""
    def __init__(self):
        # Public key of this slave; filled in elsewhere (empty by default).
        self.pubkey = ''
    def getjobs(self):
        """
        Fetch the job list from the master's slave_api and wrap
        each raw job dict in a Job object.
        """
        # NOTE(review): unpickling a network response trusts the master
        # server completely -- pickle can execute arbitrary code.
        job_list = pickle.load(urllib2.urlopen(config.AUTOTUA_MASTER+'/slave_api/jobs'))
        return [Job(job_data) for job_data in job_list]
class Job:
"""A Job."""
def __init__(self, job_data):
self.maint_details = job_data['maintainer']
self.maint = job_data['maintainer']['username']
self.name = job_data['name']
self.stage = fetch.Fetchable(uri=[job_data['stage']])
self.jobdir = osp.join(config.WORKDIR, self.maint, self.name)
self.jobtagedir = osp.join(self.jobdir, 'jobtage')
self.jobtagerev = job_data['jobtagerev']
self.atoms = job_data['atoms']
self.jobuilds = []
self.chroot = chroot.WorkChroot(self.jobdir, self.stage.filename)
atexit.register(self.tidy)
def __repr__(self):
return '<%s: %s>' % (self.name, 'Job object')
def __str__(self):
return '%s object' % self.name
# Get jobuild SRC_URI -> $tmpdir/jobfiles
# jobfile -(link)-> $chroot_tmpdir/jobfiles
def _setup_jobfiles(self):
job_src = []
action = 'link'
fetcher = fetch.Fetcher(config.JOBFILE_DIR)
processor = jobuild.Processor(None, self.chroot)
for jbld in self.jobuilds:
processor.jobuild = jbld
src_uri = processor.get_var('SRC_URI').split()
for i in src_uri:
job_src.append(fetch.Fetchable(uri=[i]))
src = fetcher.tarballdir+'/%s'
dest = self.chroot.chrootdir+config.CHAUTOTUA_DIR+'/jobfiles/%s'
for fetchable in job_src:
fetcher.fetch(fetchable)
src = src % fetchable.filename
dest = dest % fetchable.filename
if action == 'link':
try:
os.link(src, dest)
except OSError:
print "Chroot and Jobfiles are on different devices. Falling back to copying..."
action = 'copy'
if action == 'copy':
shutil.copyfile(src, dest)
def fetch(self):
# Job metadata stuff
## Get stage3 (if required)
print 'Fetching stage...'
fetcher = fetch.Fetcher(config.STAGE_DIR)
fetcher.fetch(self.stage)
# Sync jobtage tree
print 'Syncing jobtage tree...'
sync.Syncer().sync()
# Export from local jobtage tree
print 'Exporting jobtage tree...'
sync.Syncer(uri=config.JOBTAGE_DIR, destdir=self.jobtagedir,
rev=self.jobtagerev, scheme="git-export").sync()
## Read config, get portage snapshot if required
#self._fetch_portage_snapshot()
def prepare(self):
# Chroot setup needs to be done before parsing jobuilds
# because all parsing is done inside the chroot
print 'Setup the chroot for usage...'
self.chroot.setup()
# Create jobuild objects for parsing
for atom in self.atoms.split():
self.jobuilds.append(jobuild.Jobuild(self.jobtagedir, atom))
print 'Fetch jobuild SRC_URI and hardlink/copy into chroot'
self._setup_jobfiles()
def run(self):
processor = jobuild.Processor(None, self.chroot)
for jbld in self.jobuilds:
processor.jobuild = jbld
print 'Running jobuild "%s"' % jbld.atom
processor.run_phase('all')
def tidy(self):
print 'Tidying up..'
self.chroot.tidy()
def clean(self):
# Tidy up before cleaning
self.tidy()
shutil.rmtree(self.jobdir)
os.removedirs(osp.join(config.WORKDIR, self.maint))
if __name__ == "__main__":
job = Jobs().getjobs()[0]
job.fetch()
if os.getuid() == 0:
job.prepare()
job.run()
job.tidy()
else:
print 'You need to be root to run job.prepare(), job.run() and job.tidy()'
|