6 changes: 6 additions & 0 deletions config/cloud_controller.yml
@@ -320,6 +320,12 @@ directories:
diagnostics: /tmp

stacks_file: config/stacks.yml

storage_cli_config_file_droplets: config/storage_cli_config_droplets.json
storage_cli_config_file_packages: config/storage_cli_config_packages.json
storage_cli_config_file_buildpacks: config/storage_cli_config_buildpacks.json
storage_cli_config_file_resource_pool: config/storage_cli_config_resource_pool.json

newrelic_enabled: false

max_annotations_per_resource: 200
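Each of these entries points to a provider-specific JSON file rendered onto disk by the operator. As a minimal sketch (placeholder values, assuming the AzureRM provider; the required keys mirror the validation added in storage_cli_client.rb below), such a file would look like:

{
  "provider": "AzureRM",
  "account_name": "<storage-account-name>",
  "account_key": "<storage-account-key>",
  "container_name": "cc-droplets",
  "environment": "AzureCloud"
}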
18 changes: 9 additions & 9 deletions lib/cloud_controller/blobstore/client_provider.rb
@@ -16,7 +16,7 @@ def self.provide(options:, directory_key:, root_dir: nil, resource_type: nil)
provide_fog(options, directory_key, root_dir)
elsif options[:blobstore_type] == 'storage-cli'
# storage-cli is an experimental feature and not yet fully implemented. !!! DO NOT USE IN PRODUCTION !!!
provide_storage_cli(options, directory_key, root_dir)
provide_storage_cli(options, directory_key, root_dir, resource_type)
else
provide_webdav(options, directory_key, root_dir)
end
@@ -71,14 +71,14 @@ def provide_webdav(options, directory_key, root_dir)
Client.new(SafeDeleteClient.new(retryable_client, root_dir))
end

def provide_storage_cli(options, directory_key, root_dir)
raise BlobstoreError.new('connection_config for storage-cli is not provided') unless options[:connection_config]

client = StorageCliClient.build(connection_config: options.fetch(:connection_config),
directory_key: directory_key,
root_dir: root_dir,
min_size: options[:minimum_size],
max_size: options[:maximum_size])
def provide_storage_cli(options, directory_key, root_dir, resource_type)
client = StorageCliClient.build(
directory_key: directory_key,
resource_type: resource_type,
root_dir: root_dir,
min_size: options[:minimum_size],
max_size: options[:maximum_size]
)

logger = Steno.logger('cc.blobstore.storage_cli_client')
errors = [StandardError]
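For illustration, a hypothetical caller of the new storage-cli path might look like this (values are invented; the real call sites are in dependency_locator.rb and resource_pool.rb further down, and root_dir still defaults to nil):

CloudController::Blobstore::ClientProvider.provide(
  options: { blobstore_type: 'storage-cli', minimum_size: 100, maximum_size: 1000 },
  directory_key: 'cc-droplets',
  resource_type: :droplets
)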
lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client.rb
@@ -5,15 +5,6 @@ def cli_path
ENV['AZURE_STORAGE_CLI_PATH'] || '/var/vcap/packages/azure-storage-cli/bin/azure-storage-cli'
end

def build_config(connection_config)
{
account_name: connection_config[:azure_storage_account_name],
account_key: connection_config[:azure_storage_access_key],
container_name: @directory_key,
environment: connection_config[:environment]
}.compact
end

CloudController::Blobstore::StorageCliClient.register('AzureRM', AzureStorageCliClient)
end
end
125 changes: 75 additions & 50 deletions lib/cloud_controller/blobstore/storage_cli/storage_cli_client.rb
@@ -16,29 +16,88 @@ class << self
attr_reader :registry

def register(provider, klass)
registry[provider] = klass
registry[provider.to_s] = klass
end

def build(connection_config:, directory_key:, root_dir:, min_size: nil, max_size: nil)
provider = connection_config[:provider]
raise 'Missing connection_config[:provider]' if provider.nil?
def build(directory_key:, root_dir:, resource_type: nil, min_size: nil, max_size: nil)
raise 'Missing resource_type' if resource_type.nil?

impl_class = registry[provider]
cfg = fetch_config(resource_type)
provider = cfg['provider']

key = provider.to_s
impl_class = registry[key] || registry[key.downcase] || registry[key.upcase]
raise "No storage CLI client registered for provider #{provider}" unless impl_class

impl_class.new(connection_config:, directory_key:, root_dir:, min_size:, max_size:)
impl_class.new(provider: provider, directory_key: directory_key, root_dir: root_dir, resource_type: resource_type, min_size: min_size, max_size: max_size,
config_path: config_path_for(resource_type))
end

RESOURCE_TYPE_KEYS = {
'droplets' => :storage_cli_config_file_droplets,
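# buildpack_cache blobs are stored in the droplets blobstore, so buildpack_cache intentionally reuses the droplets config file below (assumption based on the existing Cloud Controller blobstore layout)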
'buildpack_cache' => :storage_cli_config_file_droplets,
'buildpacks' => :storage_cli_config_file_buildpacks,
'packages' => :storage_cli_config_file_packages,
'resource_pool' => :storage_cli_config_file_resource_pool
}.freeze

def fetch_config(resource_type)
path = config_path_for(resource_type)
validate_config_path!(path)

json = fetch_json(path)
validate_json_object!(json, path)
validate_required_keys!(json, path)

json
end

def config_path_for(resource_type)
normalized = resource_type.to_s
key = RESOURCE_TYPE_KEYS.fetch(normalized) do
raise BlobstoreError.new("Unknown resource_type: #{resource_type}")
end
VCAP::CloudController::Config.config.get(key)
end

def fetch_json(path)
Oj.load(File.read(path))
rescue Oj::ParseError, EncodingError => e
raise BlobstoreError.new("Failed to parse storage-cli JSON at #{path}: #{e.message}")
end

def validate_config_path!(path)
return if path && File.file?(path) && File.readable?(path)

raise BlobstoreError.new("Storage-cli config file not found or not readable at: #{path.inspect}")
end

def validate_json_object!(json, path)
raise BlobstoreError.new("Config at #{path} must be a JSON object") unless json.is_a?(Hash)
end

def validate_required_keys!(json, path)
provider = json['provider'].to_s.strip
raise BlobstoreError.new("No provider specified in config file: #{path.inspect}") if provider.empty?

required = %w[account_key account_name container_name environment]
missing = required.reject { |k| json.key?(k) && !json[k].to_s.strip.empty? }
return if missing.empty?

raise BlobstoreError.new("Missing required keys in #{path}: #{missing.join(', ')}")
end
end

def initialize(connection_config:, directory_key:, root_dir:, min_size: nil, max_size: nil)
def initialize(provider:, directory_key:, resource_type:, root_dir:, config_path:, min_size: nil, max_size: nil)
@cli_path = cli_path
@directory_key = directory_key
@resource_type = resource_type.to_s
@root_dir = root_dir
@min_size = min_size || 0
@max_size = max_size
config = build_config(connection_config)
@config_file = write_config_file(config)
@fork = connection_config.fetch(:fork, false)
@provider = provider
@config_file = config_path
logger.info('storage_cli_config_selected', resource_type: @resource_type, path: @config_file)
end

def local?
@@ -88,28 +147,16 @@ def cp_to_blobstore(source_path, destination_key)
end

def cp_file_between_keys(source_key, destination_key)
if @fork
run_cli('copy', partitioned_key(source_key), partitioned_key(destination_key))
else
# Azure CLI doesn't support server-side copy yet, so fallback to local copy
Tempfile.create('blob-copy') do |tmp|
download_from_blobstore(source_key, tmp.path)
cp_to_blobstore(tmp.path, destination_key)
end
end
run_cli('copy', partitioned_key(source_key), partitioned_key(destination_key))
end

def delete_all(_=nil)
# page_size is currently not considered. Azure SDK / API has a limit of 5000
pass unless @fork

# Currently, storage-cli does not support bulk deletion.
run_cli('delete-recursive', @root_dir)
end

def delete_all_in_path(path)
pass unless @fork

# Currently, storage-cli does not support bulk deletion.
run_cli('delete-recursive', partitioned_key(path))
end
@@ -123,29 +170,19 @@ def delete_blob(blob)
end

def blob(key)
if @fork
properties = properties(key)
return nil if properties.nil? || properties.empty?

signed_url = sign_url(partitioned_key(key), verb: 'get', expires_in_seconds: 3600)
StorageCliBlob.new(key, properties:, signed_url:)
elsif exists?(key)
# Azure CLI does not support getting blob properties directly, so fallback to local check
signed_url = sign_url(partitioned_key(key), verb: 'get', expires_in_seconds: 3600)
StorageCliBlob.new(key, signed_url:)
end
properties = properties(key)
return nil if properties.nil? || properties.empty?

signed_url = sign_url(partitioned_key(key), verb: 'get', expires_in_seconds: 3600)
StorageCliBlob.new(key, properties:, signed_url:)
end

def files_for(prefix, _ignored_directory_prefixes=[])
return nil unless @fork

files, _status = run_cli('list', prefix)
files.split("\n").map(&:strip).reject(&:empty?).map { |file| StorageCliBlob.new(file) }
end

def ensure_bucket_exists
return unless @fork

run_cli('ensure-bucket-exists')
end
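
run_cli itself is not touched by this diff; purely as a hedged sketch of how the selected config file plausibly reaches the binary (the -c flag, Open3 usage, and error handling are assumptions, not taken from the PR):

def run_cli(command, *args)
  # Hypothetical sketch only -- the real helper already exists in this class and is unchanged here.
  # Assumes the BOSH-style storage CLIs accept `-c <config.json>` and that 'open3' is required.
  output, status = Open3.capture2e(@cli_path, '-c', @config_file, command, *args)
  raise BlobstoreError.new("storage-cli #{command} failed: #{output}") unless status.success?
  [output, status]
end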

@@ -194,18 +231,6 @@ def build_config(connection_config)
raise NotImplementedError
end

def write_config_file(config)
# TODO: Consider to move the config generation into capi-release
config_dir = File.join(tmpdir, 'blobstore-configs')
FileUtils.mkdir_p(config_dir)

config_file_path = File.join(config_dir, "#{@directory_key}.json")
File.open(config_file_path, 'w', 0o600) do |f|
f.write(Oj.dump(config.transform_keys(&:to_s)))
end
config_file_path
end

def tmpdir
VCAP::CloudController::Config.config.get(:directories, :tmpdir)
rescue StandardError
6 changes: 6 additions & 0 deletions lib/cloud_controller/config_schemas/api_schema.rb
@@ -95,6 +95,12 @@ class ApiSchema < VCAP::Config
},

stacks_file: String,

optional(:storage_cli_config_file_buildpacks) => String,
optional(:storage_cli_config_file_packages) => String,
optional(:storage_cli_config_file_resource_pool) => String,
optional(:storage_cli_config_file_droplets) => String,

newrelic_enabled: bool,

optional(:max_migration_duration_in_minutes) => Integer,
5 changes: 5 additions & 0 deletions lib/cloud_controller/config_schemas/clock_schema.rb
@@ -50,6 +50,11 @@ class ClockSchema < VCAP::Config

pid_filename: String, # Pid filename to use

optional(:storage_cli_config_file_buildpacks) => String,
optional(:storage_cli_config_file_packages) => String,
optional(:storage_cli_config_file_resource_pool) => String,
optional(:storage_cli_config_file_droplets) => String,

newrelic_enabled: bool,

optional(:max_migration_duration_in_minutes) => Integer,
6 changes: 6 additions & 0 deletions lib/cloud_controller/config_schemas/worker_schema.rb
@@ -41,6 +41,12 @@ class WorkerSchema < VCAP::Config
},

stacks_file: String,

optional(:storage_cli_config_file_buildpacks) => String,
optional(:storage_cli_config_file_packages) => String,
optional(:storage_cli_config_file_resource_pool) => String,
optional(:storage_cli_config_file_droplets) => String,

newrelic_enabled: bool,

optional(:max_migration_duration_in_minutes) => Integer,
6 changes: 4 additions & 2 deletions lib/cloud_controller/dependency_locator.rb
@@ -167,7 +167,8 @@ def legacy_global_app_bits_cache

Blobstore::ClientProvider.provide(
options: options,
directory_key: options.fetch(:resource_directory_key)
directory_key: options.fetch(:resource_directory_key),
resource_type: :resource_pool
)
end

@@ -177,7 +178,8 @@ def global_app_bits_cache
Blobstore::ClientProvider.provide(
options: options,
directory_key: options.fetch(:resource_directory_key),
root_dir: RESOURCE_POOL_DIR
root_dir: RESOURCE_POOL_DIR,
resource_type: :resource_pool
)
end

3 changes: 2 additions & 1 deletion lib/cloud_controller/resource_pool.rb
@@ -25,7 +25,8 @@ def initialize(config)
@blobstore = CloudController::Blobstore::ClientProvider.provide(
options: options,
directory_key: options.fetch(:resource_directory_key),
root_dir: CloudController::DependencyLocator::RESOURCE_POOL_DIR
root_dir: CloudController::DependencyLocator::RESOURCE_POOL_DIR,
resource_type: 'resource_pool'
)

@minimum_size = options[:minimum_size] || 0 # TODO: move default into config object?
27 changes: 21 additions & 6 deletions spec/unit/lib/cloud_controller/blobstore/client_provider_spec.rb
@@ -129,22 +129,37 @@ module Blobstore
context 'when storage-cli is requested' do
let(:blobstore_type) { 'storage-cli' }
let(:directory_key) { 'some-bucket' }
let(:resource_type) { 'droplets' }
let(:root_dir) { 'some-root-dir' }
let(:storage_cli_client_mock) { class_double(CloudController::Blobstore::StorageCliClient) }
let(:tmpdir) { Dir.mktmpdir('storage_cli_spec') }
let(:config_path) { File.join(tmpdir, 'storage_cli_config_droplets.json') }

before do
options.merge!(connection_config: {}, minimum_size: 100, maximum_size: 1000)
File.write(config_path, '{"provider": "AzureRM",
"account_name": "some-account-name",
"account_key": "some-access-key",
"container_name": "directory_key",
"environment": "AzureCloud" }')
allow(VCAP::CloudController::Config.config).to receive(:get).with(:storage_cli_config_file_droplets).and_return(config_path)
options.merge!(provider: 'AzureRM', minimum_size: 100, maximum_size: 1000)
end

it 'provides a storage-cli client' do
allow(StorageCliClient).to receive(:build).and_return(storage_cli_client_mock)
ClientProvider.provide(options:, directory_key:, root_dir:)
expect(StorageCliClient).to have_received(:build).with(connection_config: {}, directory_key: directory_key, root_dir: root_dir, min_size: 100, max_size: 1000)
ClientProvider.provide(options:, directory_key:, root_dir:, resource_type:)
expect(StorageCliClient).to have_received(:build).with(directory_key: directory_key, resource_type: resource_type, root_dir: root_dir,
min_size: 100, max_size: 1000)
end

it 'raises an error if connection_config is not provided' do
options.delete(:connection_config)
expect { ClientProvider.provide(options:, directory_key:, root_dir:) }.to raise_error(BlobstoreError, 'connection_config for storage-cli is not provided')
it 'raises an error if provider is not provided' do
config_path = VCAP::CloudController::Config.config.get(:storage_cli_config_file_droplets)
File.write(config_path,
'{"provider": "", "account_name": "some-account-name", "account_key": "some-access-key", "container_name": "directory_key", "environment": "AzureCloud" }')
expect { ClientProvider.provide(options:, directory_key:, root_dir:, resource_type:) }.to raise_error(BlobstoreError) { |e|
expect(e.message).to include('No provider specified in config file:')
expect(e.message).to include(File.basename(config_path))
}
end
end
end