The first patch should be applied on aeolus-configure, the second on conductor. The patch adds a new service which periodically compares data in the warehouse to data in conductor and updates the conductor data if there is a difference. Data = templates, images, provider images, icicles. It makes sense to run this service together with the new imagefactory — the old imagefactory doesn't use the warehouse to store objects, so there is nothing to sync.
From: Jan Provaznik jprovazn@redhat.com
--- recipes/aeolus_recipe/manifests/conductor.pp | 5 +++++ 1 files changed, 5 insertions(+), 0 deletions(-)
diff --git a/recipes/aeolus_recipe/manifests/conductor.pp b/recipes/aeolus_recipe/manifests/conductor.pp index 7059e5e..edcad94 100644 --- a/recipes/aeolus_recipe/manifests/conductor.pp +++ b/recipes/aeolus_recipe/manifests/conductor.pp @@ -43,6 +43,7 @@ class aeolus::conductor inherits aeolus { require => File['/var/lib/condor/condor_config.local'] } service { ['aeolus-conductor', 'conductor-condor_refreshd', + 'conductor-warehouse_sync', 'conductor-dbomatic', 'conductor-delayed_job']: ensure => 'running', @@ -172,6 +173,7 @@ class aeolus::conductor::disabled { provider => 'yum', ensure => 'absent', require => Service['aeolus-conductor', 'conductor-condor_refreshd', + 'conductor-warehouse_sync', 'conductor-dbomatic', 'imagefactoryd', 'conductor-image_builder_service', @@ -200,10 +202,12 @@ class aeolus::conductor::disabled { enable => false, require => Service['aeolus-conductor', 'conductor-condor_refreshd', + 'conductor-warehouse_sync', 'conductor-dbomatic', 'conductor-delayed_job'] } service { ['aeolus-conductor', 'conductor-condor_refreshd', + 'conductor-warehouse_sync', 'conductor-dbomatic', 'conductor-delayed_job']: ensure => 'stopped', @@ -216,6 +220,7 @@ class aeolus::conductor::disabled { rails_env => "production", require => Service["aeolus-conductor", "conductor-condor_refreshd", + 'conductor-warehouse_sync', "conductor-dbomatic", "conductor-image_builder_service", "conductor-delayed_job"]}
From: Jan Provaznik jprovazn@redhat.com
periodically pulls data from warehouse and updates conductor's db --- aeolus-conductor.spec.in | 6 + conf/conductor-warehouse_sync | 78 ++++++++++++++++ src/app/models/template.rb | 8 ++ src/warehouse_sync/warehouse_sync-server | 89 ++++++++++++++++++ src/warehouse_sync/warehouse_sync.rb | 148 ++++++++++++++++++++++++++++++ 5 files changed, 329 insertions(+), 0 deletions(-) create mode 100755 conf/conductor-warehouse_sync create mode 100755 src/warehouse_sync/warehouse_sync-server create mode 100644 src/warehouse_sync/warehouse_sync.rb
diff --git a/aeolus-conductor.spec.in b/aeolus-conductor.spec.in index 6e44e8b..00784e0 100644 --- a/aeolus-conductor.spec.in +++ b/aeolus-conductor.spec.in @@ -126,6 +126,7 @@ mv %{buildroot}/%{app_root}/doc %{buildroot}/%{app_root}/test %{buildroot}/%{doc %{__cp} conf/aeolus-conductor %{buildroot}%{_initrddir} %{__cp} conf/conductor-dbomatic %{buildroot}%{_initrddir} %{__cp} conf/conductor-condor_refreshd %{buildroot}%{_initrddir} +%{__cp} conf/conductor-warehouse_sync %{buildroot}%{_initrddir} %{__cp} conf/conductor-image_builder_service %{buildroot}%{_initrddir} %{__cp} conf/conductor-delayed_job %{buildroot}%{_initrddir} %{__cp} conf/aeolus-conductor-httpd.conf %{buildroot}%{_sysconfdir}/httpd/conf.d/aeolus-conductor.conf @@ -157,6 +158,7 @@ touch %{buildroot}%{_localstatedir}/log/%{name}/rails.log touch %{buildroot}%{_localstatedir}/log/%{name}/dbomatic.log touch %{buildroot}%{_localstatedir}/run/%{name}/event_log_position touch %{buildroot}%{_localstatedir}/log/%{name}/condor_refreshd.log +touch %{buildroot}%{_localstatedir}/log/%{name}/warehouse_sync.log touch %{buildroot}%{_localstatedir}/log/%{name}/image_builder_service.log
[ -e %{buildroot}%{app_root}/log ] || %{__mkdir} -p %{buildroot}%{app_root}/log @@ -193,6 +195,7 @@ getent passwd aeolus >/dev/null || \ /sbin/chkconfig --add aeolus-conductor /sbin/chkconfig --add conductor-dbomatic /sbin/chkconfig --add conductor-condor_refreshd +/sbin/chkconfig --add conductor-warehouse_sync /sbin/chkconfig --add conductor-image_builder_service /sbin/chkconfig --add conductor-delayed_job
@@ -204,6 +207,8 @@ if [ $1 = 0 ]; then /sbin/chkconfig --del conductor-dbomatic /sbin/service conductor-condor_refreshd stop > /dev/null 2>&1 /sbin/chkconfig --del conductor-condor_refreshd +/sbin/service conductor-warehouse_sync stop > /dev/null 2>&1 +/sbin/chkconfig --del conductor-warehouse_sync /sbin/service conductor-image_builder_service stop > /dev/null 2>&1 /sbin/chkconfig --del conductor-image_builder_service /sbin/service conductor-delayed_job stop > /dev/null 2>&1 @@ -227,6 +232,7 @@ fi %{_initrddir}/aeolus-conductor %{_initrddir}/conductor-dbomatic %{_initrddir}/conductor-condor_refreshd +%{_initrddir}/conductor-warehouse_sync %{_initrddir}/conductor-image_builder_service %{_initrddir}/conductor-delayed_job %config(noreplace) %{_sysconfdir}/logrotate.d/%{name} diff --git a/conf/conductor-warehouse_sync b/conf/conductor-warehouse_sync new file mode 100755 index 0000000..38b9f35 --- /dev/null +++ b/conf/conductor-warehouse_sync @@ -0,0 +1,78 @@ +#!/bin/bash +# +# +# conductor-warehouse_sync startup script for conductor-warehouse_sync +# +# chkconfig: - 99 01 +# description: conductor-warehouse_sync populates the Conductor scheduler +# from the Aeolus Conductor db + +[ -r /etc/sysconfig/conductor-rails ] && . /etc/sysconfig/conductor-rails + +[ -r /etc/sysconfig/aeolus-conductor ] && . /etc/sysconfig/aeolus-conductor + +CONDUCTOR_DIR="${CONDUCTOR_DIR:-/usr/share/aeolus-conductor}" +REFRESHD_LOG="${REFRESHD_LOG:-/var/log/aeolus-conductor/warehouse_sync.log}" +REFRESHD_PID="${REFRESHD_PID:-/var/run/aeolus-conductor/warehouse_sync.pid}" +REFRESHD_LOCKFILE="${REFRESHD_LOCKFILE:-/var/lock/subsys/conductor-warehouse_sync}" +AEOLUS_USER="${AEOLUS_USER:-aeolus}" + +REFRESHD_PATH=$CONDUCTOR_DIR/warehouse_sync +REFRESHD_PROG=warehouse_sync-server + +. /etc/init.d/functions + +start() { + echo -n "Starting conductor-warehouse_sync: " + + daemon --user=$AEOLUS_USER $REFRESHD_PATH/$REFRESHD_PROG + RETVAL=$? 
+ if [ $RETVAL -eq 0 ] && touch $REFRESHD_LOCKFILE ; then + echo_success + echo + else + echo_failure + echo + fi +} + +stop() { + echo -n "Shutting down conductor-warehouse_sync: " + killproc -p $REFRESHD_PID $REFRESHD_PROG + RETVAL=$? + if [ $RETVAL -eq 0 ] && rm -f $REFRESHD_LOCKFILE ; then + echo_success + echo + else + echo_failure + echo + fi +} + +case "$1" in + start) + start + ;; + stop) + stop + ;; + restart) + stop + start + ;; + reload) + ;; + force-reload) + restart + ;; + status) + status -p $REFRESHD_PID $REFRESHD_PROG + RETVAL=$? + ;; + *) + echo "Usage: conductor-warehouse_sync {start|stop|restart|status}" + exit 1 + ;; +esac + +exit $RETVAL diff --git a/src/app/models/template.rb b/src/app/models/template.rb index b84409c..21efc6f 100644 --- a/src/app/models/template.rb +++ b/src/app/models/template.rb @@ -73,6 +73,14 @@ class Template < ActiveRecord::Base write_attribute(:xml, xml.to_xml) end
+ def update_from_xml + self.name = xml.name + self.summary = xml.description + self.platform = xml.platform + self.platform_version = xml.platform_version + self.architecture = xml.architecture + end + def providers # TODO: rewrite cleanly ProviderImage.all( diff --git a/src/warehouse_sync/warehouse_sync-server b/src/warehouse_sync/warehouse_sync-server new file mode 100755 index 0000000..33ddb81 --- /dev/null +++ b/src/warehouse_sync/warehouse_sync-server @@ -0,0 +1,89 @@ +#!/usr/bin/ruby + +$: << File.dirname(__FILE__) + +require 'rubygems' +require 'optparse' +require 'warehouse_sync' + +DELAY = 5 +WAREHOUSE_CONFIG = YAML.load_file(File.join(File.dirname(__FILE__), "../config/image_warehouse.yml")) + +# show timestamp in log file +class Logger + def format_message(severity, timestamp, progname, msg) + time = timestamp.getutc.strftime("%Y-%m-%d %H:%M:%S") + "\n#{time}:#{severity.to_s.upcase}: #{msg}" + end +end + +help = false +daemon = true +log_dir = "/var/log/aeolus-conductor" +pid_dir = "/var/run/aeolus-conductor" + +optparse = OptionParser.new do |opts| + + opts.banner = <<BANNER +Usage: +warehouse_sync [options] + +Options: +BANNER + opts.on( '-f', '--pid-file PATH', "Use PATH to the warehouse_sync pid directory (defaults to #{pid_dir})") do |newpath| + pid_dir = newpath + end + opts.on( '-h', '--help', '') { help = true } + opts.on( '-l', '--log PATH', "Use PATH to the warehouse_sync log directory (defaults to #{log_dir}). Use '-' for stdout") do |newpath| + log_dir = newpath + end + opts.on( '-n', '--nodaemon', 'Do not daemonize (useful in combination with -l for debugging)') { daemon = false } +end + +begin + optparse.parse! 
+rescue OptionParser::InvalidOption => e + puts "Invalid option #{e.args}" + puts + puts optparse + exit(1) +end + +if help + puts optparse + exit(0) +end + +if log_dir == '-' + LOG_FILE = STDOUT +else + LOG_FILE = "#{log_dir}/warehouse_sync.log" +end + +logger = Logger.new(LOG_FILE) +logger.level = Logger::DEBUG +logger.datetime_format = "%Y-%m-%d %H:%M:%S" +logger.info "Warehouse_sync starting up" + +def create_pid_file(file) + FileUtils.mkdir_p File.dirname(file) + open(file, "w") {|f| f.write(Process.pid) } + File.chmod(0644, file) +end + +# daemonize +if daemon + # note that this requires 'active_support', which we get for free from dutils + Process.daemon +end + +begin + create_pid_file("#{pid_dir}/warehouse_sync.pid") + WarehouseSync.new(:uri => WAREHOUSE_CONFIG['baseurl'], :logger => logger, :delay => DELAY * 60).run +rescue Exception => e + logger.error "#{e.backtrace.shift}: #{e.message}" + e.backtrace.each do |step| + logger.error "\tfrom #{step}" + end + exit 1 +end diff --git a/src/warehouse_sync/warehouse_sync.rb b/src/warehouse_sync/warehouse_sync.rb new file mode 100644 index 0000000..169d619 --- /dev/null +++ b/src/warehouse_sync/warehouse_sync.rb @@ -0,0 +1,148 @@ +$: << File.join(File.dirname(__FILE__), "../dutils") + +require 'rubygems' +require 'dutils' +require 'warehouse_client' + +class WarehouseSync + class NotFoundError < Exception;end + + def initialize(opts) + @uri = opts[:uri] + @delay = opts[:delay] || 10*60 + @logger = opts[:logger] + @whouse = Warehouse::Client.new(@uri) + end + + def run + while true + begin + @logger.debug "---------------------------------------" + pull_templates + pull_images + pull_provider_images + rescue => e + @logger.error e.message + @logger.error "backtrace:\n" + e.backtrace.join("\n ") + ensure + @logger.debug "sleep #{@delay}" + sleep @delay + end + end + end + + def pull_templates + @logger.debug "*** Getting templates" + @whouse.bucket('templates').objects.each do |bucket_obj| + 
safely_process(bucket_obj) do |obj| + attrs = obj.attrs([:uuid]) + #tpl = Template.find_by_uuid(attrs[:uuid]) || Template.new(:uuid => attrs[:uuid]) + unless tpl = Template.find_by_uuid(attrs[:uuid]) + raise NotFoundError, "Template with uuid #{attrs[:uuid]} not found" + end + tpl.xml = obj.body + tpl.update_from_xml + update_changes(tpl) + end + end + end + + def pull_images + @logger.debug "*** Getting images" + @whouse.bucket('images').objects.each do |bucket_obj| + safely_process(bucket_obj) do |obj| + attrs = obj.attrs([:uuid, :target, :template]) + #img = Image.find_by_uuid(attrs[:uuid]) || Image.new(:uuid => attrs[:uuid]) + unless img = Image.find_by_uuid(attrs[:uuid]) + raise NotFoundError, "image with uuid #{attrs[:uuid]} not found" + end + unless attrs[:target] + raise "target uuid is not set" + end + unless ptype = ProviderType.find_by_codename(attrs[:target]) + raise "provider type #{attrs[:target]} not found" + end + unless attrs[:template] + raise "template uuid is not set" + end + unless tpl = Template.find_by_uuid(attrs[:template]) + raise "Template with uuid #{attrs[:template]} not found" + end + img.provider_type_id = ptype.id + img.template_id = tpl.id + update_changes(img) + end + end + end + + def pull_provider_images + @logger.debug "*** Getting provider images" + @whouse.bucket('provider_images').objects.each do |bucket_obj| + safely_process(bucket_obj) do |obj| + attrs = obj.attrs([:uuid, :image, :icicle]) + # we don't allow create non-existing ProviderImage in conductor because + # we don't know provider_id (provider attribute contains only url or + # string which is not unique in conductor) + unless pimg = ProviderImage.find_by_uuid(attrs[:uuid]) + raise NotFoundError, "provider image with uuid #{attrs[:uuid]} not found" + end + unless attrs[:image] + raise "image uuid is not set" + end + unless img = Image.find_by_uuid(attrs[:image]) + raise "image with uuid #{attrs[:image]} not found" + end + unless attrs[:icicle] + raise "icicle uuid is 
not set" + end + pimg.image_id = img.id + pimg.icicle = pull_provider_image_icicle(attrs[:icicle]) + # TODO: set target_identifier too? + update_changes(img) + end + end + end + + private + + def pull_provider_image_icicle(uuid) + @logger.debug " getting provider image icicle with uuid #{uuid}" + whouse_icicle = @whouse.bucket('icicles').object(uuid) + icicle = Icicle.find_by_uuid(uuid) || Icicle.new(:uuid => uuid) + icicle.xml = whouse_icicle.body + icicle.uuid = uuid + update_changes(icicle) + icicle + end + + def safely_process(obj) + begin + yield obj + rescue NotFoundError => e + @logger.error "Skipping #{obj.key} - not found in DB" + rescue => e + @logger.error "Error while processing #{obj.key} (skipping): #{e.message}" + @logger.error e.backtrace.join("\n ") + end + end + + def update_changes(obj) + if obj.new_record? + @logger.debug "#{obj.class.class_name} #{obj.uuid} is not in DB, saving" + obj.save! + elsif obj.changed? + @logger.debug "#{obj.class.class_name} #{obj.uuid} has been changed:" + log_changes(obj) + obj.save! + else + @logger.debug "#{obj.class.class_name} #{obj.uuid} is without changes" + end + end + + def log_changes(obj) + obj.changed.each do |attr| + @logger.debug "old #{attr}: #{obj.send(attr + '_was')}" + @logger.debug "new #{attr}: #{obj[attr]}" + end + end +end
On Mon, 2011-03-14 at 14:11 +0100, jprovazn@redhat.com wrote:
From: Jan Provaznik jprovazn@redhat.com
periodically pulls data from warehouse and updates conductor's db
I think this was not pushed yet (I don't see it in git), and we need it to get the right information to launch an instance from an image the new factory built (as far as I know). Note that I have not tested this, but I was looking through it based on an issue tsedovic reported today where the provider_image had no provider_image_key — this script should be setting that now, I believe. I snipped below, way down, to show where I think it goes.
diff --git a/src/warehouse_sync/warehouse_sync.rb b/src/warehouse_sync/warehouse_sync.rb new file mode 100644 index 0000000..169d619 --- /dev/null +++ b/src/warehouse_sync/warehouse_sync.rb @@ -0,0 +1,148 @@ +$: << File.join(File.dirname(__FILE__), "../dutils")
+require 'rubygems' +require 'dutils' +require 'warehouse_client'
+class WarehouseSync
- class NotFoundError < Exception;end
<snip>
- def pull_provider_images
- @logger.debug "*** Getting provider images"
- @whouse.bucket('provider_images').objects.each do |bucket_obj|
safely_process(bucket_obj) do |obj|
attrs = obj.attrs([:uuid, :image, :icicle])
# we don't allow create non-existing ProviderImage in conductor because
# we don't know provider_id (provider attribute contains only url or
# string which is not unique in conductor)
unless pimg = ProviderImage.find_by_uuid(attrs[:uuid])
raise NotFoundError, "provider image with uuid #{attrs[:uuid]} not found"
end
unless attrs[:image]
raise "image uuid is not set"
end
unless img = Image.find_by_uuid(attrs[:image])
raise "image with uuid #{attrs[:image]} not found"
end
unless attrs[:icicle]
raise "icicle uuid is not set"
end
pimg.image_id = img.id
pimg.icicle = pull_provider_image_icicle(attrs[:icicle])
# TODO: set target_identifier too?
This ^ is what we need to set, I believe, since condor looks for the provider_image_key, which appears to be the AMI ID in the EC2 case. Also note: the `update_changes(img)` call just below looks like it should be `update_changes(pimg)` — the method assigns `pimg.image_id` and `pimg.icicle` but then saves `img`, so the modified provider image is never persisted.
update_changes(img)
end
- end
- end
- private
- def pull_provider_image_icicle(uuid)
- @logger.debug " getting provider image icicle with uuid #{uuid}"
- whouse_icicle = @whouse.bucket('icicles').object(uuid)
- icicle = Icicle.find_by_uuid(uuid) || Icicle.new(:uuid => uuid)
- icicle.xml = whouse_icicle.body
- icicle.uuid = uuid
- update_changes(icicle)
- icicle
- end
- def safely_process(obj)
- begin
yield obj
- rescue NotFoundError => e
@logger.error "Skipping #{obj.key} - not found in DB"
- rescue => e
@logger.error "Error while processing #{obj.key} (skipping): #{e.message}"
@logger.error e.backtrace.join("\n ")
- end
- end
- def update_changes(obj)
- if obj.new_record?
@logger.debug "#{obj.class.class_name} #{obj.uuid} is not in DB, saving"
obj.save!
- elsif obj.changed?
@logger.debug "#{obj.class.class_name} #{obj.uuid} has been changed:"
log_changes(obj)
obj.save!
- else
@logger.debug "#{obj.class.class_name} #{obj.uuid} is without changes"
- end
- end
- def log_changes(obj)
- obj.changed.each do |attr|
@logger.debug "old #{attr}: #{obj.send(attr + '_was')}"
@logger.debug "new #{attr}: #{obj[attr]}"
- end
- end
+end
aeolus-devel@lists.fedorahosted.org