radian 1 year ago
parent
commit
113ce7258b

+ 0 - 16
pom.xml

@@ -203,22 +203,6 @@
 		    <artifactId>sshj</artifactId>
 		</dependency>
 		
-		<!-- com.google -->
-		<dependency>
-			<groupId>com.google.cloud</groupId>
-			<artifactId>spring-cloud-gcp-starter</artifactId>
-		</dependency>
-
-		<dependency>
-			<groupId>com.google.cloud</groupId>
-			<artifactId>spring-cloud-gcp-starter-storage</artifactId>
-		</dependency>
-  				  		
-		<dependency>
-		    <groupId>com.google.cloud</groupId>
-		    <artifactId>spring-cloud-gcp-starter-bigquery</artifactId>
-		</dependency>
-  		  				  				
 		<dependency>
 			<groupId>com.google.code.gson</groupId>
 			<artifactId>gson</artifactId>

+ 21 - 46
src/main/java/kr/co/iya/base/config/AidBaseConfig.java

@@ -4,7 +4,6 @@ import javax.servlet.MultipartConfigElement;
 
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
-import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
 import org.springframework.boot.web.servlet.MultipartConfigFactory;
 import org.springframework.context.annotation.Bean;
@@ -14,58 +13,52 @@ import org.springframework.core.env.Environment;
 import org.springframework.transaction.PlatformTransactionManager;
 import org.springframework.util.unit.DataSize;
 
-import kr.co.iya.base.config.KafkaConfig.KafkaProperties;
 import kr.co.iya.base.context.MaskContext;
 import kr.co.iya.base.support.aid.AwareAidPack;
-import kr.co.iya.base.support.aid.BeanAidPack;
-import kr.co.iya.base.support.aid.RequestAttributesAidPack;
 import kr.co.iya.base.support.aid.AwareAidPack.ApplicationContextAid;
+import kr.co.iya.base.support.aid.BeanAidPack;
 import kr.co.iya.base.support.aid.BeanAidPack.BeanAid;
+import kr.co.iya.base.support.aid.RequestAttributesAidPack;
 import kr.co.iya.base.support.aid.RequestAttributesAidPack.RequestAttributesAid;
 import kr.co.iya.base.support.util.Base64UtilPack;
-import kr.co.iya.base.support.util.BashProcessUtilPack;
-import kr.co.iya.base.support.util.CompressUtilPack;
-import kr.co.iya.base.support.util.CryptoUtilPack;
-import kr.co.iya.base.support.util.FileUtilPack;
-import kr.co.iya.base.support.util.GcpUtilPack;
-import kr.co.iya.base.support.util.HttpRequestUtilPack;
-import kr.co.iya.base.support.util.JsonUtilPack;
-import kr.co.iya.base.support.util.KafkaUtilPack;
-import kr.co.iya.base.support.util.LobConverterUtilPack;
-import kr.co.iya.base.support.util.LocaleUtilPack;
-import kr.co.iya.base.support.util.MaskUtilPack;
-import kr.co.iya.base.support.util.MessageUtilPack;
-import kr.co.iya.base.support.util.ObjectMapperUtilPack;
-import kr.co.iya.base.support.util.PropertiesUtilPack;
-import kr.co.iya.base.support.util.RestTemplateUtilPack;
-import kr.co.iya.base.support.util.SessionUtilPack;
-import kr.co.iya.base.support.util.SftpUtilPack;
-import kr.co.iya.base.support.util.SystemUtilPack;
-import kr.co.iya.base.support.util.ThreadUtilPack;
-import kr.co.iya.base.support.util.TimeUtilPack;
-import kr.co.iya.base.support.util.TransactionUtilPack;
 import kr.co.iya.base.support.util.Base64UtilPack.Base64Util;
+import kr.co.iya.base.support.util.BashProcessUtilPack;
 import kr.co.iya.base.support.util.BashProcessUtilPack.BashProcessUtil;
+import kr.co.iya.base.support.util.CompressUtilPack;
 import kr.co.iya.base.support.util.CompressUtilPack.CompressUtil;
+import kr.co.iya.base.support.util.CryptoUtilPack;
 import kr.co.iya.base.support.util.CryptoUtilPack.CryptoUtil;
+import kr.co.iya.base.support.util.FileUtilPack;
 import kr.co.iya.base.support.util.FileUtilPack.FileUtil;
-import kr.co.iya.base.support.util.GcpUtilPack.GcpUtil;
+import kr.co.iya.base.support.util.HttpRequestUtilPack;
 import kr.co.iya.base.support.util.HttpRequestUtilPack.HttpRequestUtil;
+import kr.co.iya.base.support.util.JsonUtilPack;
 import kr.co.iya.base.support.util.JsonUtilPack.JsonUtil;
-import kr.co.iya.base.support.util.KafkaUtilPack.KafkaMeta;
-import kr.co.iya.base.support.util.KafkaUtilPack.KafkaUtil;
+import kr.co.iya.base.support.util.LobConverterUtilPack;
 import kr.co.iya.base.support.util.LobConverterUtilPack.LobConverterUtil;
+import kr.co.iya.base.support.util.LocaleUtilPack;
 import kr.co.iya.base.support.util.LocaleUtilPack.LocaleUtil;
+import kr.co.iya.base.support.util.MaskUtilPack;
 import kr.co.iya.base.support.util.MaskUtilPack.MaskUtil;
+import kr.co.iya.base.support.util.MessageUtilPack;
 import kr.co.iya.base.support.util.MessageUtilPack.MessageUtil;
+import kr.co.iya.base.support.util.ObjectMapperUtilPack;
 import kr.co.iya.base.support.util.ObjectMapperUtilPack.ObjectMapperUtil;
+import kr.co.iya.base.support.util.PropertiesUtilPack;
 import kr.co.iya.base.support.util.PropertiesUtilPack.PropertiesUtil;
+import kr.co.iya.base.support.util.RestTemplateUtilPack;
 import kr.co.iya.base.support.util.RestTemplateUtilPack.RestTemplateUtil;
+import kr.co.iya.base.support.util.SessionUtilPack;
 import kr.co.iya.base.support.util.SessionUtilPack.SessionUtil;
+import kr.co.iya.base.support.util.SftpUtilPack;
 import kr.co.iya.base.support.util.SftpUtilPack.SftpUtil;
+import kr.co.iya.base.support.util.SystemUtilPack;
 import kr.co.iya.base.support.util.SystemUtilPack.SystemUtil;
+import kr.co.iya.base.support.util.ThreadUtilPack;
 import kr.co.iya.base.support.util.ThreadUtilPack.ThreadUtil;
+import kr.co.iya.base.support.util.TimeUtilPack;
 import kr.co.iya.base.support.util.TimeUtilPack.TimeUtil;
+import kr.co.iya.base.support.util.TransactionUtilPack;
 import kr.co.iya.base.support.util.TransactionUtilPack.TransactionUtil;
 
 @Configuration
@@ -130,11 +123,6 @@ public class AidBaseConfig {
 		return BashProcessUtilPack.getBashProcessUtil();
 	}
     
-    @Bean
-    @ConditionalOnExpression("T(kr.co.iya.base.GcpContext).isEnabled()")
-    GcpUtil getGcpUtil() {   	
-		return GcpUtilPack.getGcpUtil(getBeanAid());
-	}
 
     @Bean
     CompressUtil getCompressUtil() {
@@ -162,19 +150,6 @@ public class AidBaseConfig {
 		return JsonUtilPack.getJsonUtil(true);
 	}
 
-    @Bean
-    @ConditionalOnExpression("T(kr.co.iya.base.KafkaConfig).isEnabled()")
-    @ConditionalOnProperty(name="spring.kafka.enabled", havingValue="true", matchIfMissing=false)
-    KafkaUtil getKafkaUtil() {
-    	return KafkaUtilPack.getKafkaUtil(KafkaMeta.builder()
-			.applicationName(applicationName)
-	    	.properties(this.getBeanAid().getBean(KafkaProperties.class))
-	    	.objectMapperUtil(getObjectMapperUtil())
-	    	.jsonUtil(getJsonUtil())
-	    	.systemUtil(getSystemUtil())
-	    	.timeUtil(getTimeUtil())
-	    	.build());
-	}
 
     @Bean
     LobConverterUtil getLobConverterUtil() {

+ 0 - 5
src/main/java/kr/co/iya/base/config/DataSourceConfig.java

@@ -24,13 +24,10 @@ import lombok.extern.slf4j.Slf4j;
 @ConditionalOnExpression("T(kr.co.iya.base.DatasourceConfig).isEnabled()")
 public class DataSourceConfig {
 
-    //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
     @Bean(name="default.hikariConfig")
     @ConfigurationProperties(prefix="spring.datasource.hikari")
     HikariConfig hikariConfig() {
     	log.debug(">> default.hikariConfig");
-    	
 		return new HikariConfig();
 	}
 
@@ -54,6 +51,4 @@ public class DataSourceConfig {
 		return txManager;
 	}
 
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-	
 }

+ 0 - 225
src/main/java/kr/co/iya/base/config/KafkaConfig.java

@@ -1,225 +0,0 @@
-package kr.co.iya.base.config;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.producer.ProducerConfig;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.header.internals.RecordHeader;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.apache.kafka.common.serialization.StringSerializer;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
-import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.kafka.annotation.EnableKafka;
-import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
-import org.springframework.kafka.core.DefaultKafkaProducerFactory;
-import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.core.ProducerFactory;
-import org.springframework.kafka.listener.RecordInterceptor;
-import org.springframework.kafka.listener.adapter.RecordFilterStrategy;
-import org.springframework.kafka.support.SendResult;
-import org.springframework.kafka.support.serializer.JsonSerializer;
-import org.springframework.stereotype.Component;
-import org.springframework.util.concurrent.ListenableFutureCallback;
-
-import kr.co.iya.base.support.AbstractMeta;
-import lombok.extern.slf4j.Slf4j;
-
-@Slf4j
-@EnableKafka
-@Configuration
-@ConditionalOnExpression("T(kr.co.iya.base.KafkaConfig).isEnabled()")
-@ConditionalOnProperty(name="spring.kafka.enabled", havingValue="true", matchIfMissing=false)
-public class KafkaConfig extends AbstractMeta {
-
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-	
-	@Component
-	@ConfigurationProperties(prefix="spring.kafka")
-	public static class KafkaProperties{
-		private boolean enabled;
-		private String applicationName;
-		private String bootstrapServers;
-		
-		public boolean isEnabled() {return enabled;}
-		public void setEnabled(boolean enabled) {this.enabled = enabled;}
-		
-		public String getApplicationName() {return applicationName;}
-		public void setApplicationName(String applicationName) {this.applicationName = applicationName;}
-
-		public String getBootstrapServers() {return bootstrapServers;}
-		public void setBootstrapServers(String bootstrapServers) {this.bootstrapServers = bootstrapServers;}		
-	}
-	
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-	@Autowired
-	private KafkaProperties properties;
-
-    @Bean
-    KafkaPublisher<String> kafkaPublisherString() {
-		return new KafkaPublisher<String>() {
-			@Override
-			public void doSend(String topic, String key, String payload, Map<String, String> header, ListenableFutureCallback<? super SendResult<String, String>> callback) {
-				this.kafkaTemplateStringValue().send(this.makeProducerRecord(topic, key, payload, header)).addCallback(callback!=null?callback:this.callback());
-			}
-		};
-	}
-
-    @Bean
-    ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
-        Map<String, Object> configs = new HashMap<>();
-        configs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.properties.getBootstrapServers());
-        configs.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
-        configs.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
-        configs.put(ConsumerConfig.FETCH_MAX_BYTES_CONFIG, "10485760");
-
-        ConcurrentKafkaListenerContainerFactory<String,String> factory = new ConcurrentKafkaListenerContainerFactory<>();
-	    factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(configs));	    
-
-	    factory.setRecordFilterStrategy(new RecordFilterStrategy<String,String>() {
-	    	@Override public boolean filter(ConsumerRecord<String,String> record) {
-	    		log.debug(">> kafkaListenerContainerFactory:filter");
-	    		return false;
-	    	}
-	    });      
-	    factory.setRecordInterceptor(new RecordInterceptor<String,String>(){
-			@Override public ConsumerRecord<String,String> intercept(ConsumerRecord<String,String> record) {
-				log.debug(">> kafkaListenerContainerFactory:intercept");
-				return record;
-			}
-	    });
-	    
-        return factory;
-    }	
-	
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-    
-    public static class KafkaTemplateContext {
-    	private static Map<Object,KafkaTemplate<String,?>> context = new HashMap<>();
-
-    	public static boolean isExist(Object obj){
-    		return context.containsKey(obj);
-    	}
-    	
-    	public static KafkaTemplate<String,?> getKafkaTemplate(Object obj){
-    		if(!isExist(obj)) {return null;}
-    		KafkaTemplate<String,?> kafkaTemplate = context.get(obj);
-    		log.debug(">> obj.hashCode():{},kafkaTemplate.hashCode():{}",obj.hashCode(),kafkaTemplate.hashCode());
-    		return kafkaTemplate;    		
-    	}
-    	
-    	public static synchronized void setKafkaTemplate(Object obj,KafkaTemplate<String,?> kafkaTemplate){
-			context.put(obj, kafkaTemplate);
-    	}
-    	
-    }
-    
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-	public interface KafkaPublisher<T> {
-	    public void doSend(String topic, String key, T payload, Map<String, String> header, ListenableFutureCallback<? super SendResult<String, T>> callback);
-
-	    default public void send(String topic, T payload) {this.doSend(topic, null, payload, null, null);}		    
-	    default public void send(String topic, T payload, Map<String,String> header) {this.doSend(topic, null, payload, header, null);} 		    
-	    default public void send(String topic, T payload, ListenableFutureCallback<? super SendResult<String, T>> callback) {this.doSend(topic, null, payload, null, callback);}
-	    default public void send(String topic, T payload, Map<String, String> header, ListenableFutureCallback<? super SendResult<String, T>> callback) {this.doSend(topic, null, payload, header, callback);}
-
-	    default public void send(String topic, String key, T payload) {this.doSend(topic, key, payload, null, null);}			
-	    default public void send(String topic, String key, T payload, Map<String,String> header) {this.doSend(topic, key, payload, header, null);} 			
-	    default public void send(String topic, String key, T payload, ListenableFutureCallback<? super SendResult<String, T>> callback) {this.doSend(topic, key, payload, null, callback);}	    
-	    default public void send(String topic, String key, T payload, Map<String,String> header, ListenableFutureCallback<? super SendResult<String, T>> callback) {this.doSend(topic, key, payload, header, callback);}
-
-	    default public Map<String,String> addDefaultHeader(Map<String,String> header) {
-	    	return MetaUtil.getKafkaUtil().addDefault(header);
-	    }
-	    
-//	    default public KafkaProducer<String, T> kafkaProducerJsonValue(){
-//	    	KafkaProducer<String, T> producer = new KafkaProducer<>(this.getPublisherConfigMap(JsonSerializer.class));
-//	    	return producer;
-//	    }
-//
-//	    default public KafkaProducer<String, String> kafkaProducerStringValue(){
-//	    	KafkaProducer<String, String> producer = new KafkaProducer<>(this.getPublisherConfigMap(StringSerializer.class));
-//	    	return producer;
-//	    }
-
-	    @SuppressWarnings("unchecked")
-		default public KafkaTemplate<String,T> kafkaTemplateJsonValue(){
-	    	if(!KafkaTemplateContext.isExist(this)) {
-		    	ProducerFactory<String,T> producerFactory = new DefaultKafkaProducerFactory<>(this.getPublisherConfigMap(JsonSerializer.class));
-		    	KafkaTemplate<String,T> kafkaTemplate = new KafkaTemplate<>(producerFactory);
-		    	KafkaTemplateContext.setKafkaTemplate(this,kafkaTemplate);
-		    	return kafkaTemplate;
-	    	}
-	    	return (KafkaTemplate<String, T>) KafkaTemplateContext.getKafkaTemplate(this);
-	    }
-	    
-	    @SuppressWarnings("unchecked")
-	    default public KafkaTemplate<String,String> kafkaTemplateStringValue(){
-	    	if(!KafkaTemplateContext.isExist(this)) {
-	    		ProducerFactory<String,String> producerFactory = new DefaultKafkaProducerFactory<>(this.getPublisherConfigMap(StringSerializer.class));
-		    	KafkaTemplate<String,String> kafkaTemplate = new KafkaTemplate<>(producerFactory);
-		    	KafkaTemplateContext.setKafkaTemplate(this,kafkaTemplate);
-		    	return kafkaTemplate;
-	    	}
-	    	return (KafkaTemplate<String, String>) KafkaTemplateContext.getKafkaTemplate(this);
-	    }	
-	    
-	    default public ProducerRecord<String,T> makeProducerRecord(String topic, String key, T payload, Map<String, String> header){
-			ProducerRecord<String,T> producerRecord = StringUtils.isBlank(key)?new ProducerRecord<>(topic,payload):new ProducerRecord<>(topic,key,payload);		 				
-			this.addDefaultHeader(header).forEach((k,v)->producerRecord.headers().add(new RecordHeader(k,v.getBytes())));
-			return producerRecord;
-	    }
-
-	    default public Map<String, Object> getPublisherConfigMap(Object valueClass){
-			Map<String, Object> configMap = new HashMap<>();
-			
-			configMap.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, MetaUtil.getKafkaUtil().getBootstrapServers());					
-			configMap.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);			
-			configMap.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueClass);			
-			configMap.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 1000);
-			configMap.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, "10485760");
-			configMap.put(ProducerConfig.ACKS_CONFIG,"1"); // ack=0, ack=1, ack=all
-			//configMap.put(ProducerConfig.RETRIES_CONFIG,  1);
-			//configMap.put(ProducerConfig.BATCH_SIZE_CONFIG, 20000);
-			//configMap.put(ProducerConfig.LINGER_MS_CONFIG, 1);
-			//configMap.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 24568545);	
-			return configMap;
-	    }
-
-	    default public ListenableFutureCallback<SendResult<String,T>> callback(){
-			return new ListenableFutureCallback<SendResult<String,T>>() {
-	            @Override
-	            public void onSuccess(SendResult<String, T> result) {
-	                log.debug(">> topic:{}",result.getProducerRecord().topic());
-	                log.debug(">> key:{}",result.getProducerRecord().key());
-	                //log.debug("headers:{}",result.getProducerRecord().headers());
-	                log.debug(">> headers:{}",MetaUtil.getKafkaUtil().convertMap(result.getProducerRecord().headers()));
-	                log.debug(">> payload:{}",result.getProducerRecord().value());	                
-	                
-	                log.debug(">> partition:{}",result.getRecordMetadata().partition());                
-	                log.debug(">> offset:{}",result.getRecordMetadata().offset());
-	                log.debug(">> timestamp:{}",result.getRecordMetadata().timestamp());
-	            }
-
-	            @Override
-	            public void onFailure(Throwable ex) {
-	                log.error("error:{}",ex.getMessage());
-	            }
-			};   
-	   }
-	}
-    
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-}
-
-

+ 4 - 1
src/main/java/kr/co/iya/base/config/SwaggerConfig.java

@@ -8,8 +8,9 @@ import org.springframework.context.annotation.Configuration;
 
 import io.swagger.v3.oas.models.OpenAPI;
 import io.swagger.v3.oas.models.info.Info;
+import lombok.extern.slf4j.Slf4j;
 
-
+@Slf4j
 @Configuration
 @ConditionalOnExpression("T(kr.co.iya.base.SwaggerConfig).isEnabled()")
 @ConditionalOnWebApplication
@@ -34,6 +35,8 @@ public class SwaggerConfig {
     	info.setTitle(this.name+"-"+this.springProfilesActive);
     	info.setDescription(this.description);
     	info.version(this.version);
+    	
+    	log.debug("swagger service name: {}", this.name+"-"+this.springProfilesActive);
       	
     	OpenAPI openApi = new OpenAPI();
         openApi.setInfo(info);

+ 0 - 370
src/main/java/kr/co/iya/base/context/GcpContext.java

@@ -1,370 +0,0 @@
-package kr.co.iya.base.context;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.Serializable;
-import java.nio.charset.StandardCharsets;
-import java.time.ZonedDateTime;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-import javax.annotation.PostConstruct;
-
-import org.apache.commons.lang3.StringUtils;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
-import org.springframework.boot.context.properties.ConfigurationProperties;
-import org.springframework.core.io.Resource;
-import org.springframework.stereotype.Component;
-
-import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.PropertyAccessor;
-import com.fasterxml.jackson.databind.DeserializationFeature;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.SerializationFeature;
-import com.google.api.client.json.GenericJson;
-import com.google.api.client.json.JsonObjectParser;
-import com.google.api.client.json.gson.GsonFactory;
-import com.google.auth.oauth2.ServiceAccountCredentials;
-import com.google.cloud.bigquery.BigQuery;
-import com.google.cloud.bigquery.BigQueryOptions;
-import com.google.cloud.bigquery.CsvOptions;
-import com.google.cloud.bigquery.Dataset;
-import com.google.cloud.bigquery.Schema;
-import com.google.cloud.bigquery.Table;
-import com.google.cloud.bigquery.TableResult;
-import com.google.cloud.spring.autoconfigure.core.GcpProperties;
-import com.google.cloud.storage.Blob;
-import com.google.cloud.storage.Bucket;
-import com.google.cloud.storage.Storage;
-import com.google.cloud.storage.StorageOptions;
-
-import kr.co.iya.base.exception.SystemException;
-import lombok.Data;
-import lombok.extern.slf4j.Slf4j;
-
-@Slf4j
-@Component
-@ConditionalOnExpression("T(kr.co.iya.base.GcpContext).isEnabled()")
-public class GcpContext {
-
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-	@Component
-	@ConfigurationProperties(prefix="utcb.cloud.gcp.context")
-	@Data
-	public static class UtcbGcpProperties{
-		private boolean enabled;
-		private String defaultProjectId;
-		private List<Resource> credentials;
-	}
-	
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-	public static interface BigQueryHandler {
-		
-		public BigQuery getBigQuery();
-		
-		public Map<String,Object> convert(Dataset dataset);
-		public Map<String,Object> convert(Table dataset);
-		public Map<String,Object> convert(Blob blob);		
-		public List<Map<String,Object>> convert(List<?> list);
-
-	    public List<Dataset> listDatasets(String projectId);
-	    public Dataset getDataset(String projectId, String datasetName);	    
-	    public boolean isExistDataset(String projectId, String datasetName);
-	    public Dataset createDataset(String projectId, String datasetName);
-		
-	    public List<Table> listTables(String projectId, String datasetName);	        
-	    public Table getTable(String projectId, String datasetName, String tableName);	        
-	    public boolean isExistTable(String projectId, String datasetName, String tableName);	    
-	    
-	    public TableResult executeQuery(String projectId, String datasetName, String query, boolean isLegacySql);
-	    
-	    public Table createTable(String projectId, String datasetName, String tableName, Schema schema);
-
-	    public Table createTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri); 
-	    public Table createTableFromCsv(String projectId, String datasetName, String tableName, Schema schema, String bucketUri); 
-	    public Table createTableFromCsv(String projectId, String datasetName, String tableName, Schema schema, String bucketUri, CsvOptions csvOptions);	    
-	    public Table createTableFromQuery(String projectId, String datasetName, String tableName, String query, boolean isLegacySql);
-	    
-	    public Table truncateTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri);
-	    public Table truncateTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri, CsvOptions csvOptions);
-	    public Table truncateTableFromQuery(String projectId, String datasvsetName, String tableName, String query, boolean isLegacySql);
-	    
-	    public Table appendTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri);
-	    public Table appendTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri, CsvOptions csvOptions);
-	    public Table appendTableFromQuery(String projectId, String datasetName, String tableName, String query, boolean isLegacySql);
-	   
-	    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri);
-	    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri, String fieldDelimiter);
-	    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri, String fieldDelimiter, boolean printHeader);
-	    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri, String fieldDelimiter, boolean printHeader, boolean isCompressed);
-
-	    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri);
-	    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri, String fieldDelimiter);
-	    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri, String fieldDelimiter, boolean printHeader);
-	    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri, String fieldDelimiter, boolean printHeader, boolean isCompressed);
-	}
-	
-	public static interface StorageHandler {
-		
-		public enum UploadType {buffer,stream};
-		public enum CompressType {gzip,nomal};
-		
-		public Storage getStorage();
-		
-		public Map<String,Object> convert(Blob blob);
-		public List<Map<String,Object>> convert(List<Blob> blob);
-		
-	    public boolean isExistBucket(String bucket);
-	    public boolean isExistObject(String bucket, String remotePath);
-		public boolean isDirectory(String bucket, String remotePath);
-	    
-	    public Bucket getBucket(String bucket);	    
-		public Blob getObject(String bucket, String remotePath);
-
-		public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetRemotePath);
-		public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetRemotePath, boolean isOverWrite);
-		public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetBucket, String targetRemotePath);
-		public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetBucket, String targetRemotePath, boolean isOverWrite);
-		
-		public List<String> listBuckets();
-		public List<Blob> listObjects(String bucket);
-		public List<Blob> listObjects(String bucket, String remotePath);
-
-		public List<Blob> upload(String bucket, String storePath, String remotePath) throws IOException;		
-		public List<Blob> upload(String bucket, String storePath, String remotePath, UploadType uploadType) throws IOException;
-		public List<Blob> upload(String bucket, String storePath, String remotePath, boolean isOverWrite) throws IOException;
-		public List<Blob> upload(String bucket, String storePath, String remotePath, UploadType uploadType, boolean isOverWrite) throws IOException;
-		
-		public BucketObjectInfo download(String bucket, String remotePath, OutputStream outputStream);
-		public BucketObjectInfo download(String bucket, String remotePath, File file);
-		public List<BucketObjectInfo> download(String bucket, String remotePath, String storePath);
-		public List<BucketObjectInfo> download(String bucket, String remotePath, String storePath, boolean isOverWrite);
-		
-		public void delete(String bucket, String remotePath);
-		
-		public Blob compose(String bucket, List<String> sources, String targetPath);
-	}
-	
-	@Data
-	public static class BucketObjectInfo implements Serializable {
-		private static final long serialVersionUID = 1L;
-		private Map<String,String> systemInfo;
-		private String bucket;
-		private String path;		
-		private String name;
-		private String contentType;		
-		private Long size;
-		private boolean isDirectory;
-		private ZonedDateTime createTime;
-		private ZonedDateTime updateTime;
-		private String downloadStorePath;
-		private Long downloadTime;
-		private Map<String, String> metadata;
-	}
-
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-	@Autowired
-	private UtcbGcpProperties utcbGcpProperties;
-
-	@PostConstruct
-	private void postConstruct() throws Exception {	
-		if(this.utcbGcpProperties == null || !this.utcbGcpProperties.isEnabled()) {return;}
-
-		JsonObjectParser parser = new JsonObjectParser(GsonFactory.getDefaultInstance());		    
-		List<Resource> list = this.utcbGcpProperties.getCredentials();
-		if(list != null) {
-			String firstProjectId = null;
-			for(Resource resource : list) {
-				GenericJson jsonData = parser.parseAndClose(resource.getInputStream(), StandardCharsets.UTF_8, GenericJson.class);
-				String projectId = jsonData.get("project_id").toString();
-				if(firstProjectId == null) {firstProjectId=projectId;}
-				log.debug(">> gcp.projectId: {}",projectId);
-				this.authContext.put(projectId,new GcpAuthInfo(projectId,resource,jsonData));
-			}
-			if(StringUtils.isEmpty(this.defaultProjectId)) {
-				this.defaultProjectId = firstProjectId;
-			}
-		}
-		this.isEnabled = true;
-	}
-
-	public void initContext(GcpProperties properties, BigQuery defaultBigQuery, Storage defaultStorage){
-		if(properties == null) {return;}
-		
-		try {
-			JsonObjectParser parser = new JsonObjectParser(GsonFactory.getDefaultInstance());	
-
-			Resource resource = properties.getCredentials().getLocation();
-			GenericJson jsonData = parser.parseAndClose(resource.getInputStream(),StandardCharsets.UTF_8, GenericJson.class);
-			String credentialProjectId = jsonData.get("project_id").toString();
-							
-			this.isEnabled = false;
-			this.authContext = new HashMap<>();
-			this.bigQueryContext = new HashMap<>();
-			this.bigQueryHandler = new HashMap<>();			
-			this.storageContext = new HashMap<>();
-			this.storageHandler = new HashMap<>();
-
-			if(StringUtils.isEmpty(properties.getProjectId())) {
-				properties.setProjectId(credentialProjectId);
-			}
-
-			this.defaultProjectId = properties.getProjectId();
-			
-			this.authContext.put(this.defaultProjectId ,new GcpAuthInfo(this.defaultProjectId,resource,jsonData));
-			if(defaultBigQuery != null) {
-				this.bigQueryContext.put(this.defaultProjectId, defaultBigQuery);
-				if(!this.defaultProjectId.equals(credentialProjectId)){this.bigQueryContext.put(credentialProjectId, defaultBigQuery);}
-			}
-			if(defaultStorage != null) {
-				this.storageContext.put(this.defaultProjectId, defaultStorage);
-				if(!this.defaultProjectId.equals(credentialProjectId)){this.storageContext.put(credentialProjectId, defaultStorage);}
-			}
-		}catch(Exception e){
-			throw new SystemException(e.getMessage());
-		}
-		
-		this.isEnabled = true;
-	}
-
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-	
-	@Data
-	private class GcpAuthInfo{
-		private String projectId;
-		private Resource resource;
-		private GenericJson jsonData;
-
-		public GcpAuthInfo(String projectId,Resource resource,GenericJson jsonData) {
-			this.projectId = projectId;
-			this.resource = resource;
-			this.jsonData = jsonData;
-		}
-	}
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-	private boolean isEnabled = false;
-
-	private String defaultProjectId;
-
-	private Map<String,GcpAuthInfo> authContext = new HashMap<>();
-	
-	private Map<String,BigQuery> bigQueryContext = new HashMap<>();
-	private Map<String,BigQueryHandler> bigQueryHandler = new HashMap<>();
-	
-	private Map<String,Storage> storageContext = new HashMap<>();
-	private Map<String,StorageHandler> storageHandler = new HashMap<>();
-
-	private ObjectMapper mapper;
-	
-
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-	public boolean isEnabled() {
-		return this.isEnabled;
-	}
-	
-	public String getDefaultProjectId() {
-		return this.defaultProjectId;
-	}
-	
-	public Resource getResource(String projectId) {
-		GcpAuthInfo info = this.authContext.get(projectId);
-		return info == null?null:info.getResource();
-	}
-
-	public GenericJson getGenericJson(String projectId) {
-		GcpAuthInfo info = this.authContext.get(projectId);
-		return info == null?null:info.getJsonData();
-	}
-
-	public List<String> getProjects(){			
-	    return this.authContext.keySet().stream().collect(Collectors.toCollection(ArrayList::new));
-	}
-
-	public BigQuery getDefaultBigQuery() {
-		if(StringUtils.isEmpty(this.defaultProjectId)) {throw new SystemException("defaultProjectId is empty or null.");}
-		return this.getBigQuery(this.defaultProjectId);
-	}
-	
-	public BigQuery getBigQuery(String projectId) {
-		if(this.bigQueryContext != null && this.bigQueryContext.containsKey(projectId)) {
-			return this.bigQueryContext.get(projectId);
-		}
-		try {			
-			Resource resource = this.getResource(projectId);
-			if(resource == null) {throw new SystemException("serviceAccountCredentials's resource("+projectId+") is null");}
-			BigQuery bigQuery = BigQueryOptions.newBuilder()
-					.setCredentials(ServiceAccountCredentials.fromStream(resource.getInputStream()))
-					.build()
-					.getService();
-			this.bigQueryContext.put(projectId, bigQuery);
-		}catch(Exception e) {
-			throw new SystemException(e.getMessage());
-		}
-		return this.getBigQuery(projectId);
-	}
-	
-	public Storage getDefaultStorage() {
-		if(StringUtils.isEmpty(this.defaultProjectId)) {throw new SystemException("defaultProjectId is empty or null.");}
-		return this.getStorage(this.defaultProjectId);
-	}
-	
-	public Storage getStorage(String projectId) {
-		if(this.storageContext != null && this.storageContext.containsKey(projectId)) {
-			return this.storageContext.get(projectId);
-		}
-		try {			
-			Resource resource = this.getResource(projectId);
-			if(resource == null) {throw new SystemException("serviceAccountCredentials's resource("+projectId+") is null");}
-			Storage storage = StorageOptions.newBuilder()
-					.setCredentials(ServiceAccountCredentials.fromStream(resource.getInputStream()))
-					.build()
-					.getService();
-			this.storageContext.put(projectId, storage);			
-		}catch(Exception e) {
-			throw new SystemException(e.getMessage());
-		}
-		return this.getStorage(projectId);
-	}
-
-	public BigQueryHandler getBigQueryHandler(String projectId) {
-		if(this.bigQueryHandler == null) {return null;}
-		return this.bigQueryHandler.get(projectId);
-	}
-
-	public void setBigQueryHandler(String projectId,BigQueryHandler handler) {
-		this.bigQueryHandler.put(projectId, handler);
-	}
-	
-	public StorageHandler getStorageHandler(String projectId) {
-		if(this.storageHandler == null) {return null;}
-		return this.storageHandler.get(projectId);
-	}
-
-	public void setStorageHandler(String projectId,StorageHandler handler) {
-		this.storageHandler.put(projectId, handler);
-	}
-
-	public ObjectMapper getMapper() {
-		if(this.mapper == null) {
-			this.mapper = new ObjectMapper();
-			this.mapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY);
-			this.mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
-			this.mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);	
-			this.mapper.enable(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT);
-		}
-		return this.mapper;
-	}
-	
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-}

+ 0 - 4
src/main/java/kr/co/iya/base/support/AbstractMetaBatch.java

@@ -10,9 +10,7 @@ import kr.co.iya.base.support.util.BashProcessUtilPack.BashProcessUtil;
 import kr.co.iya.base.support.util.CompressUtilPack.CompressUtil;
 import kr.co.iya.base.support.util.CryptoUtilPack.CryptoUtil;
 import kr.co.iya.base.support.util.FileUtilPack.FileUtil;
-import kr.co.iya.base.support.util.GcpUtilPack.GcpUtil;
 import kr.co.iya.base.support.util.JsonUtilPack.JsonUtil;
-import kr.co.iya.base.support.util.KafkaUtilPack.KafkaUtil;
 import kr.co.iya.base.support.util.LobConverterUtilPack.LobConverterUtil;
 import kr.co.iya.base.support.util.LocaleUtilPack.LocaleUtil;
 import kr.co.iya.base.support.util.MaskUtilPack.MaskUtil;
@@ -42,10 +40,8 @@ public abstract class AbstractMetaBatch extends AbstractMeta {
 	public static final CompressUtil getCompressUtil() {return MetaUtil.getCompressUtil();} 
 	public static final CryptoUtil getCryptoUtil() {return MetaUtil.getCryptoUtil();} 
 	public static final FileUtil getFileUtil() { return MetaUtil.getFileUtil();} 
-	public static final GcpUtil getGcpUtil() {return getBean(GcpUtil.class);}	
 
 	public static final JsonUtil getJsonUtil() {return MetaUtil.getJsonUtil();}
-    public static final KafkaUtil getKafkaUtil() {return MetaUtil.getKafkaUtil();}	
 	public static final LobConverterUtil getLobConvertUtil() {return MetaUtil.getLobConvertUtil();}
 	public static final LocaleUtil getLocaleUtil() {return MetaUtil.getLocaleUtil();} 
 	public static final MaskUtil getMaskUtil() {return MetaUtil.getMaskUtil();} 	

+ 0 - 4
src/main/java/kr/co/iya/base/support/AbstractMetaOnline.java

@@ -10,10 +10,8 @@ import kr.co.iya.base.support.util.BashProcessUtilPack.BashProcessUtil;
 import kr.co.iya.base.support.util.CompressUtilPack.CompressUtil;
 import kr.co.iya.base.support.util.CryptoUtilPack.CryptoUtil;
 import kr.co.iya.base.support.util.FileUtilPack.FileUtil;
-import kr.co.iya.base.support.util.GcpUtilPack.GcpUtil;
 import kr.co.iya.base.support.util.HttpRequestUtilPack.HttpRequestUtil;
 import kr.co.iya.base.support.util.JsonUtilPack.JsonUtil;
-import kr.co.iya.base.support.util.KafkaUtilPack.KafkaUtil;
 import kr.co.iya.base.support.util.LobConverterUtilPack.LobConverterUtil;
 import kr.co.iya.base.support.util.LocaleUtilPack.LocaleUtil;
 import kr.co.iya.base.support.util.MaskUtilPack.MaskUtil;
@@ -44,12 +42,10 @@ public abstract class AbstractMetaOnline extends AbstractMeta {
 	public static final CompressUtil getCompressUtil() {return MetaUtil.getCompressUtil();} 
 	public static final CryptoUtil getCryptoUtil() {return MetaUtil.getCryptoUtil();} 
 	public static final FileUtil getFileUtil() { return MetaUtil.getFileUtil();} 
-	public static final GcpUtil getGcpUtil() {return getBean(GcpUtil.class);}	
 	
 	public static final HttpRequestUtil getHttpRequestUtil() {return MetaUtil.getHttpRequestUtil();}
 	
 	public static final JsonUtil getJsonUtil() {return MetaUtil.getJsonUtil();}
-    public static final KafkaUtil getKafkaUtil() {return MetaUtil.getKafkaUtil();}	
 	public static final LobConverterUtil getLobConvertUtil() {return MetaUtil.getLobConvertUtil();}
 	public static final LocaleUtil getLocaleUtil() {return MetaUtil.getLocaleUtil();} 
 	public static final MaskUtil getMaskUtil() {return MetaUtil.getMaskUtil();} 	

+ 0 - 4
src/main/java/kr/co/iya/base/support/AbstractUtil.java

@@ -12,10 +12,8 @@ import kr.co.iya.base.support.util.BashProcessUtilPack.BashProcessUtil;
 import kr.co.iya.base.support.util.CompressUtilPack.CompressUtil;
 import kr.co.iya.base.support.util.CryptoUtilPack.CryptoUtil;
 import kr.co.iya.base.support.util.FileUtilPack.FileUtil;
-import kr.co.iya.base.support.util.GcpUtilPack.GcpUtil;
 import kr.co.iya.base.support.util.HttpRequestUtilPack.HttpRequestUtil;
 import kr.co.iya.base.support.util.JsonUtilPack.JsonUtil;
-import kr.co.iya.base.support.util.KafkaUtilPack.KafkaUtil;
 import kr.co.iya.base.support.util.LobConverterUtilPack.LobConverterUtil;
 import kr.co.iya.base.support.util.LocaleUtilPack.LocaleUtil;
 import kr.co.iya.base.support.util.MaskUtilPack.MaskUtil;
@@ -67,11 +65,9 @@ public abstract class AbstractUtil {
 	public static final CompressUtil getCompressUtil() {return getBean(CompressUtil.class);} 
 	public static final CryptoUtil getCryptoUtil() {return getBean(CryptoUtil.class);} 
 	public static final FileUtil getFileUtil() { return getBean(FileUtil.class);} 	
-	public static final GcpUtil getGcpUtil() {return getBean(GcpUtil.class);}	
 	
 	public static final HttpRequestUtil getHttpRequestUtil() {return getBean(HttpRequestUtil.class);}
 	public static final JsonUtil getJsonUtil() {return getBean(JsonUtil.class);}
-    public static final KafkaUtil getKafkaUtil() {return getBean(KafkaUtil.class);}	
 	public static final LobConverterUtil getLobConvertUtil() {return getBean(LobConverterUtil.class);}
 	public static final LocaleUtil getLocaleUtil() {return getBean(LocaleUtil.class);} 
 	public static final MaskUtil getMaskUtil() {return getBean(MaskUtil.class);} 	

+ 0 - 1146
src/main/java/kr/co/iya/base/support/util/GcpUtilPack.java

@@ -1,1146 +0,0 @@
-package kr.co.iya.base.support.util;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.file.Paths;
-import java.text.SimpleDateFormat;
-import java.time.ZoneId;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.threeten.bp.Duration;
-
-import com.google.api.gax.paging.Page;
-import com.google.cloud.RetryOption;
-import com.google.cloud.bigquery.BigQuery;
-import com.google.cloud.bigquery.BigQuery.DatasetListOption;
-import com.google.cloud.bigquery.BigQueryException;
-import com.google.cloud.bigquery.CsvOptions;
-import com.google.cloud.bigquery.Dataset;
-import com.google.cloud.bigquery.DatasetId;
-import com.google.cloud.bigquery.DatasetInfo;
-import com.google.cloud.bigquery.ExtractJobConfiguration;
-import com.google.cloud.bigquery.Job;
-import com.google.cloud.bigquery.JobId;
-import com.google.cloud.bigquery.JobInfo;
-import com.google.cloud.bigquery.JobInfo.WriteDisposition;
-import com.google.cloud.bigquery.LoadJobConfiguration;
-import com.google.cloud.bigquery.QueryJobConfiguration;
-import com.google.cloud.bigquery.Schema;
-import com.google.cloud.bigquery.StandardTableDefinition;
-import com.google.cloud.bigquery.Table;
-import com.google.cloud.bigquery.TableDefinition;
-import com.google.cloud.bigquery.TableId;
-import com.google.cloud.bigquery.TableInfo;
-import com.google.cloud.bigquery.TableResult;
-import com.google.cloud.spring.autoconfigure.core.GcpProperties;
-import com.google.cloud.storage.Blob;
-import com.google.cloud.storage.BlobId;
-import com.google.cloud.storage.BlobInfo;
-import com.google.cloud.storage.Bucket;
-import com.google.cloud.storage.Storage;
-
-import kr.co.iya.base.context.GcpContext;
-import kr.co.iya.base.context.GcpContext.BigQueryHandler;
-import kr.co.iya.base.context.GcpContext.BucketObjectInfo;
-import kr.co.iya.base.context.GcpContext.StorageHandler;
-import kr.co.iya.base.exception.SystemException;
-import kr.co.iya.base.support.aid.BeanAidPack.BeanAid;
-import kr.co.iya.base.support.util.SystemUtilPack.SystemUtil;
-import lombok.extern.slf4j.Slf4j;
-
-@Slf4j
-public final class GcpUtilPack  {
-	
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-	
-	public interface GcpUtil {
-
-		public GcpContext getContext();
-		
-		public StorageHandler getDefaultStorageHandler();		
-		public BigQueryHandler getDefaultBigQueryHandler();
-		
-		public StorageHandler getStorageHandler(String projectId);		
-		public BigQueryHandler getBigQueryHandler(String projectId);
-	}
-
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-	public static GcpUtil getGcpUtil(BeanAid beanAid) {
-		
-		return new GcpUtil() {
-			
-			private GcpContext context = null;
-			private Map<String,String> systemInfo = null;
-			
-			@Override
-			public GcpContext getContext() {
-				
-				if(this.context == null) {
-					this.context = beanAid.getBean(GcpContext.class);
-				}
-				
-				if(this.context == null || !this.context.isEnabled()) {
-
-					BigQuery defaultBigQuery = beanAid.getBean(BigQuery.class);
-					//log.debug(">> defaultBigQuery: {}",defaultBigQuery == null?null:defaultBigQuery.hashCode());
-					
-					Storage defaultStorage = beanAid.getBean(Storage.class);		
-					//log.debug(">> defaultStorage: {}",defaultStorage == null?null:defaultStorage.hashCode());
-					
-					this.context.initContext(beanAid.getBean(GcpProperties.class), defaultBigQuery, defaultStorage);
-					
-					this.getDefaultBigQueryHandler();
-					this.getDefaultStorageHandler();					
-				}	
-				
-				return this.context;
-			}
-			
-			@Override
-			public StorageHandler getDefaultStorageHandler() {
-				return this.getStorageHandler(this.getContext().getDefaultProjectId());
-			}
-			
-			@Override
-			public BigQueryHandler getDefaultBigQueryHandler() {
-				return this.getBigQueryHandler(this.getContext().getDefaultProjectId());
-			}
-			
-			@Override
-			public BigQueryHandler getBigQueryHandler(String projectId) {
-				BigQueryHandler handler = this.getContext().getBigQueryHandler(projectId);
-				if(handler != null) {return handler;}
-				
-				BigQuery bigQuery = this.getContext().getBigQuery(projectId);
-				if(bigQuery == null) {return null;}
-				
-				this.getContext().setBigQueryHandler(projectId, this.buildBigQueryHandler(bigQuery));
-				return this.getBigQueryHandler(projectId);
-			}
-			
-			@Override
-			public StorageHandler getStorageHandler(String projectId){
-				StorageHandler handler = this.getContext().getStorageHandler(projectId);
-				if(handler != null) {return handler;}
-				
-				Storage storage = this.getContext().getStorage(projectId);
-				if(storage == null) {return null;}
-				
-				this.getContext().setStorageHandler(projectId, this.buildStorageHandler(storage));
-				return this.getStorageHandler(projectId);
-			}
-						
-			private BigQueryHandler buildBigQueryHandler(BigQuery bigQuery) {
-				return new BigQueryHandler() {
-					
-					@Override
-					public BigQuery getBigQuery() {
-						return bigQuery;
-					}
-					
-					@Override
-					public Map<String,Object> convert(Table table){
-						if(table == null) {return null;}
-						Map<String,Object> map = new HashMap<>();
-						map.put("tableId", table.getTableId());
-						map.put("etag", table.getEtag());
-						map.put("generatedId", table.getGeneratedId());
-						map.put("selfLink", table.getSelfLink());
-						map.put("friendlyName", table.getFriendlyName());
-						map.put("description", table.getDescription());
-						map.put("expirationTime", table.getExpirationTime());
-						map.put("creationTime", table.getCreationTime());
-						map.put("lastModifiedTime", table.getLastModifiedTime());
-						map.put("numBytes", table.getNumBytes());
-						map.put("numLongTermBytes", table.getNumLongTermBytes());
-						map.put("numTimeTravelPhysicalBytes", table.getNumTimeTravelPhysicalBytes());
-						map.put("numTotalLogicalBytes", table.getNumTotalLogicalBytes());
-						map.put("numActiveLogicalBytes", table.getNumActiveLogicalBytes());
-						map.put("numLongTermLogicalBytes", table.getNumLongTermLogicalBytes());
-						map.put("numTotalPhysicalBytes", table.getNumTotalPhysicalBytes());
-						map.put("numActivePhysicalBytes", table.getNumActivePhysicalBytes());
-						map.put("numLongTermPhysicalBytes", table.getNumLongTermPhysicalBytes());
-						map.put("numRows", table.getNumRows());
-						map.put("definition", table.getDefinition().toString());
-						map.put("encryptionConfiguration", table.getEncryptionConfiguration());
-						map.put("labels", table.getLabels());
-						map.put("requirePartitionFilter", table.getRequirePartitionFilter());
-						map.put("defaultCollation", table.getDefaultCollation());
-						map.put("cloneDefinition", table.getCloneDefinition());
-						map.put("tableConstraints", table.getTableConstraints());				
-						return map;
-					}
-					
-					@Override	
-					public Map<String,Object> convert(Blob blob){
-						Map<String,Object> map = new HashMap<>();
-						map.put("systemInfo",getSystemInfo());						
-						map.put("bucket",blob.getBucket());
-						map.put("path",blob.getName());
-						map.put("name",extractFileName(blob.getName()));
-						map.put("size",blob.getSize());
-						map.put("directory",blob.isDirectory());
-						map.put("createTime",blob.getCreateTimeOffsetDateTime() == null?null:blob.getCreateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()).toString());			
-						map.put("updateTime",blob.getUpdateTimeOffsetDateTime() == null?null:blob.getUpdateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()).toString());
-						map.put("contentType",blob.getContentType());
-						map.put("downloadStorePath","");
-						map.put("downloadTime","");				
-						map.put("metadata",blob.getMetadata());
-						return map;
-					}
-					
-					@Override
-					public Map<String,Object> convert(Dataset dataset) {
-						if(dataset == null) {return null;}
-						Map<String,Object> map = new HashMap<>();
-						map.put("datasetId", dataset.getDatasetId());
-						map.put("creationTime", dataset.getCreationTime());
-						map.put("defaultTableLifetime", dataset.getDefaultTableLifetime());
-						map.put("description", dataset.getDescription());
-						map.put("etag", dataset.getEtag());
-						map.put("friendlyName", dataset.getFriendlyName());
-						map.put("generatedId", dataset.getGeneratedId());
-						map.put("lastModified", dataset.getLastModified());
-						map.put("location", dataset.getLocation());
-						map.put("selfLink", dataset.getSelfLink());
-						map.put("acl", dataset.getAcl());
-						map.put("labels", dataset.getLabels());
-						map.put("defaultEncryptionConfiguration", dataset.getDefaultEncryptionConfiguration());
-						map.put("defaultPartitionExpirationMs", dataset.getDefaultPartitionExpirationMs());
-						map.put("defaultCollation", dataset.getDefaultCollation());
-						map.put("externalDatasetReference", dataset.getExternalDatasetReference());
-						map.put("storageBillingModel", dataset.getStorageBillingModel());
-				        return map;
-					}
-
-					@Override
-					public List<Map<String,Object>> convert(List<?> list){						
-						if(list == null) {return null;}
-						List<Map<String,Object>> result = new ArrayList<>();
-						list.forEach(i->{
-							if(i instanceof Dataset){result.add(this.convert((Dataset)i));}
-							if(i instanceof Table){result.add(this.convert((Table)i));}
-							if(i instanceof Blob){result.add(this.convert((Blob)i));}
-						});
-						return result;
-					}
-					
-					@Override
-				    public List<Dataset> listDatasets(String projectId) {
-				    	Page<Dataset> datasets = bigQuery.listDatasets(projectId,DatasetListOption.all());				    	
-						List<Dataset> list = new ArrayList<>();
-						datasets.iterateAll().forEach(dataset->{list.add(dataset);});
-						return list;
-				    }
-
-					@Override
-				    public Dataset getDataset(String projectId, String datasetName) {
-				    	return bigQuery.getDataset(DatasetId.of(projectId,datasetName));
-				    }
-				    
-					@Override
-				    public boolean isExistDataset(String projectId, String datasetName) {
-				    	return this.getDataset(projectId,datasetName) == null?false:true;
-				    }
-
-					@Override
-				    public Dataset createDataset(String projectId, String datasetName) {
-				    	if(this.isExistDataset(projectId, datasetName)) {throw new SystemException("dataset("+datasetName+") is already exists.");}
-				    	return bigQuery.create(DatasetInfo.newBuilder(projectId,datasetName).build());
-				    }
-
-					@Override
-				    public List<Table> listTables(String projectId, String datasetName) {
-				    	if(!this.isExistDataset(projectId, datasetName)) {throw new SystemException("dataset("+datasetName+") is not found.");}
-				    	Page<Table> tables = bigQuery.listTables(DatasetId.of(projectId, datasetName));						
-				    	List<Table> list = new ArrayList<>();    
-				    	tables.iterateAll().forEach(table->{list.add(table);});
-				    	return list;
-				    }
-
-					@Override
-				    public Table getTable(String projectId, String datasetName, String tableName) {
-				    	return bigQuery.getTable(TableId.of(projectId, datasetName, tableName));
-				    }
-
-					@Override
-				    public boolean isExistTable(String projectId, String datasetName, String tableName) {
-				    	return this.getTable(projectId, datasetName, tableName) == null?false:true;
-				    }
-				    
-					@Override			
-				    public Table createTable(String projectId, String datasetName, String tableName, Schema schema) {
-				    	if(this.isExistTable(projectId, datasetName, tableName)) {throw new SystemException("table("+tableName+") is already exists.");}				       
-				    	TableId tableId = TableId.of(projectId, datasetName, tableName);
-				        TableDefinition tableDefinition = StandardTableDefinition.of(schema == null?Schema.of():schema);				        
-				        return bigQuery.create(TableInfo.newBuilder(tableId, tableDefinition).build());    	
-				    }
-					 
-					@Override	
-					public Table createTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri) {
-						return this.createTableFromCsv(projectId, datasetName, tableName, null, bucketUri);
-					}
-					
-
-					@Override	
-				    public Table createTableFromCsv(String projectId, String datasetName, String tableName, Schema schema, String bucketUri) {
-						return this.createTableFromCsv(projectId, datasetName, tableName, schema, bucketUri, null);
-					}
-				    
-					@Override			
-				    public Table createTableFromCsv(String projectId, String datasetName, String tableName, Schema schema, String bucketUri, CsvOptions csvOptions) {
-						return this._workTableFromCsv(projectId, datasetName, tableName, schema, bucketUri, csvOptions, null);
-				    }
-					
-					@Override	
-				    public TableResult executeQuery(String projectId, String datasetName, String query, boolean isLegacySql) {
-						TableResult result = null;
-						try{
-							QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query)
-									.setUseLegacySql(isLegacySql)
-									.build();
-							
-							JobId jobId = JobId.of(UUID.randomUUID().toString());
-							Job queryJob = bigQuery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()).waitFor();
-	
-						    if(queryJob == null) {
-						    	throw new RuntimeException("Job no longer exists");
-						    }
-						    
-						    if(queryJob.getStatus().getError() != null) {
-						    	throw new RuntimeException(queryJob.getStatus().getError().toString());
-						    }
-	
-						    result = queryJob.getQueryResults();
-						    
-				    	} catch (BigQueryException | InterruptedException e) {
-				        	throw new SystemException(e.getMessage());
-				        }
-						
-						return result;
-					}
-
-					
-					@Override	
-					public Table createTableFromQuery(String projectId, String datasetName, String tableName, String query, boolean isLegacySql) {
-						if(this.isExistTable(projectId, datasetName, tableName)) {throw new SystemException("table("+tableName+") is already exists.");}
-				    	return this._workTableFromQuery(projectId, datasetName, tableName, WriteDisposition.WRITE_TRUNCATE, query, isLegacySql);
-					}
-					
-					@Override			
-				    public Table truncateTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri) {
-						return this.truncateTableFromCsv(projectId, datasetName, tableName, bucketUri, null);
-					}
-					
-					@Override			
-				    public Table truncateTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri, CsvOptions csvOptions) {
-						return this._workTableFromCsv(projectId, datasetName, tableName, null, bucketUri, csvOptions, WriteDisposition.WRITE_TRUNCATE);
-				    }
-				   
-					@Override			
-					public Table truncateTableFromQuery(String projectId, String datasetName, String tableName, String query, boolean isLegacySql) {
-						if(!this.isExistTable(projectId, datasetName, tableName)) {throw new SystemException("table("+tableName+") does not exist.");}
-						return this._workTableFromQuery(projectId, datasetName, tableName, WriteDisposition.WRITE_TRUNCATE, query, isLegacySql);
-					}
-					 
-					@Override
-					public Table appendTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri) {
-						return this.appendTableFromCsv(projectId, datasetName, tableName, bucketUri, null);				
-					}
-
-					@Override
-					public Table appendTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri, CsvOptions csvOptions) {
-						return this._workTableFromCsv(projectId, datasetName, tableName, null, bucketUri, csvOptions, WriteDisposition.WRITE_APPEND);
-					}
-						
-					@Override
-				    public Table appendTableFromQuery(String projectId, String datasetName, String tableName, String query, boolean isLegacySql) {
-						if(!this.isExistTable(projectId, datasetName, tableName)) {throw new SystemException("table("+tableName+") does not exist.");}
-						return this._workTableFromQuery(projectId, datasetName, tableName, WriteDisposition.WRITE_APPEND, query, isLegacySql);				    	
-				    }
-				    
-					@Override
-				    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri) {
-						return this.extractQueryToCsv(projectId, datasetName, query, isLegacySql, bucketUri, ",");
-					}
-					
-					@Override
-				    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri, String fieldDelimiter) {	
-						return this.extractQueryToCsv(projectId, datasetName, query, isLegacySql, bucketUri, fieldDelimiter, true);						
-					}
-					
-					@Override
-				    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri, String fieldDelimiter, boolean printHeader) {
-						return this.extractQueryToCsv(projectId, datasetName, query, isLegacySql, bucketUri, fieldDelimiter, printHeader, false);						
-					}
-					
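-					// Exports a query result to the bucket: the query is first materialized into a temporary table
-					// ("tb_utcb" + timestamp), that table is extracted to CSV, and the temporary table is deleted in the finally block.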
-					@Override
-				    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri, String fieldDelimiter, boolean printHeader, boolean isCompressed) {
-						Table table = null;
-						List<Blob> list = null;
-						try{
-							String tableName = "tb_utcb" + new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date());
-							table = this.createTableFromQuery(projectId, datasetName, tableName, query, isLegacySql);
-							list = this.extractTableToCsv(projectId, datasetName, tableName, bucketUri, fieldDelimiter, printHeader, isCompressed);
-						} catch(Exception e) {
-							throw e;
-						} finally {
-							if(table != null) {table.delete();}
-						}
-						return list;	
-					}
-				    
-					@Override
-				    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri) {
-				    	return this.extractTableToCsv(projectId, datasetName, tableName, bucketUri, null);
-				    }
-				    
-					@Override
-				    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri, String fieldDelimiter) {
-						return this.extractTableToCsv(projectId, datasetName, tableName, bucketUri, fieldDelimiter, true);						
-					}
-					
-					@Override
-				    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri, String fieldDelimiter, boolean printHeader) {
-						return this.extractTableToCsv(projectId, datasetName, tableName, bucketUri, fieldDelimiter, printHeader, false);
-					}
-				    
-					@Override
-				    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri, String fieldDelimiter, boolean printHeader, boolean isCompressed) {
-						return this._workTableToBucket(projectId, datasetName, tableName, bucketUri, "CSV", fieldDelimiter, printHeader, isCompressed?"gzip":null);
-					}
-
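-					// Runs the query into the destination table. A null writeDisposition falls back to WRITE_EMPTY
-					// (the destination must be empty); WRITE_TRUNCATE and WRITE_APPEND are passed through as given.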
-					private Table _workTableFromQuery(String projectId, String datasetName, String tableName, WriteDisposition writeDisposition, String query, boolean isLegacySql) {
-						try{
-				    		TableId tableId = TableId.of(projectId, datasetName, tableName);
-				    		log.debug(">> tableId:{}",tableId);
-				    		
-				    		QueryJobConfiguration queryConfig = null;
-				    		
-					        if(writeDisposition == null) {
-					    		queryConfig = QueryJobConfiguration.newBuilder(query)
-					    				.setDestinationTable(tableId)
-					    				.setAllowLargeResults(true)
-					    				.setWriteDisposition(WriteDisposition.WRITE_EMPTY)
-					    				.setUseLegacySql(isLegacySql)
-					    				.build();
-					        }				    		
-				    		if(WriteDisposition.WRITE_TRUNCATE.equals(writeDisposition)) {
-					    		queryConfig = QueryJobConfiguration.newBuilder(query)
-					    				.setDestinationTable(tableId)
-					    				.setAllowLargeResults(true)
-					    				.setWriteDisposition(WriteDisposition.WRITE_TRUNCATE)
-					    				.setUseLegacySql(isLegacySql)
-					    				.build();
-				    		}
-				    		if(WriteDisposition.WRITE_APPEND.equals(writeDisposition)) {
-						    	queryConfig = QueryJobConfiguration.newBuilder(query)
-						    			.setDestinationTable(tableId)
-						    			.setAllowLargeResults(true)
-						    			.setWriteDisposition(WriteDisposition.WRITE_APPEND)
-						    			.setUseLegacySql(isLegacySql)
-						    			.build();
-				    		}
-				    		
-				    		TableResult result = bigQuery.query(queryConfig);
-				    		log.debug(">> extract rows:{}",result.getTotalRows());
-							 
-				    	} catch (BigQueryException | InterruptedException e) {
-				        	throw new SystemException(e.getMessage());
-				        }
-
-				    	return this.getTable(projectId, datasetName, tableName);
-					}
-
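-					// Loads CSV objects from the bucket URI into the table. On the initial load (writeDisposition == null) the
-					// supplied schema is used, or autodetect when the schema is null; truncate/append loads reuse the existing
-					// table schema. When no CsvOptions are given, the first row is skipped as a header.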
-					private Table _workTableFromCsv(String projectId, String datasetName, String tableName, Schema schema, String bucketUri, CsvOptions csvOptions, WriteDisposition writeDisposition) {
-						try{
-					        TableId tableId = TableId.of(projectId, datasetName, tableName);
-					        log.debug(">> tableId:{}",tableId);
-					        
-					        String sourceUri = bucketUri.startsWith("gs://")?bucketUri:"gs://"+bucketUri;
-					        
-					        LoadJobConfiguration configuration = null;
-
-					        if(writeDisposition == null) {
-					        	configuration = LoadJobConfiguration.newBuilder(tableId, sourceUri)
-					        			.setFormatOptions(csvOptions == null?CsvOptions.newBuilder().setSkipLeadingRows(1).build():csvOptions)
-					        			.setWriteDisposition(WriteDisposition.WRITE_EMPTY)
-						        		.setSchema(schema)
-						        		.setAutodetect(schema == null?true:false)
-						        		.build();
-					        }
-					        if(WriteDisposition.WRITE_TRUNCATE.equals(writeDisposition)) {		        
-						        configuration = LoadJobConfiguration.newBuilder(tableId, sourceUri)
-						        		.setFormatOptions(csvOptions == null?CsvOptions.newBuilder().setSkipLeadingRows(1).build():csvOptions)
-					                    .setWriteDisposition(WriteDisposition.WRITE_TRUNCATE)
-					                    .build();
-					        }
-					        if(WriteDisposition.WRITE_APPEND.equals(writeDisposition)) {		        
-						        configuration = LoadJobConfiguration.newBuilder(tableId, sourceUri)
-						        		.setFormatOptions(csvOptions == null?CsvOptions.newBuilder().setSkipLeadingRows(1).build():csvOptions)			        		
-					                    .setWriteDisposition(WriteDisposition.WRITE_APPEND)
-					                    .build();
-					        }
-					        
-					        Job job = bigQuery.create(JobInfo.of(configuration))
-					        		.waitFor(RetryOption.initialRetryDelay(Duration.ofSeconds(1)),RetryOption.totalTimeout(Duration.ofMinutes(60)));
-
-				    		if (job == null) {
-				    			log.debug(">> job not executed since it no longer exists.");
-				    			return null;
-				    		}
-					        if (job.getStatus().getError() != null) {
-					        	log.debug(">> bigQuery was unable to load into the table due to an error:"+ job.getStatus().getError());
-					        	return null;
-					        }
-				    		log.debug(">> data(CSV) load successful");
-					        
-				        } catch (BigQueryException | InterruptedException e) {
-				        	throw new SystemException(e.getMessage());
-				        }	
-				    	
-				    	return this.getTable(projectId, datasetName, tableName);
-					}
-							
-					private List<Blob> _workTableToBucket(String projectId, String datasetName, String tableName, String bucketUri, String dataFormat, String fieldDelimiter, Boolean printHeader, String compressed){
-				    	try{			
-				    		TableId tableId = TableId.of(projectId, datasetName, tableName);
-				    		String targetUri = bucketUri.startsWith("gs://")?bucketUri:"gs://"+bucketUri;
-				    		 
-				    		ExtractJobConfiguration configuration = ExtractJobConfiguration.newBuilder(tableId, targetUri)
-				    				.setFieldDelimiter(fieldDelimiter == null?",":fieldDelimiter)
-				    				.setPrintHeader(printHeader)
-				    				.setCompression(compressed)
-				    				.setFormat(dataFormat)
-				    				.build();
-				    	    
-				    		Job job = bigQuery.create(JobInfo.of(configuration))
-				    				.waitFor(RetryOption.initialRetryDelay(Duration.ofSeconds(1)),RetryOption.totalTimeout(Duration.ofMinutes(60)));    		
-
-				    		if (job == null) {
-				    			log.debug(">> job not executed since it no longer exists.");
-				    			return null;
-				    		}
-				    		
-				    		if (job.getStatus().getError() != null) {
-				    			log.debug(">> bigQuery was unable to extract due to an error: \n" + job.getStatus().getError());
-				    			return null;
-				    		}
-				    		log.debug(">> Table export successful");
-				    		
-				        } catch (BigQueryException | InterruptedException e) {
-				        	throw new SystemException(e.getMessage());
-				        }
-
-						if(bucketUri.startsWith("gs://")) {bucketUri = bucketUri.substring("gs://".length());}
-						String bucket = bucketUri.indexOf("/")!=-1?bucketUri.substring(0,bucketUri.indexOf("/")):bucketUri;
-						String remotePath = bucketUri.indexOf("/")!=-1?bucketUri.substring(bucketUri.indexOf("/")+1):"";
-						
-						List<Blob> list = getDefaultStorageHandler().listObjects(bucket,remotePath);
-						
-				    	return list;
-				    }	
-					
-				};
-			}
-			
-			private StorageHandler buildStorageHandler(Storage storage) {
-				return new StorageHandler() {
-
-					@Override
-					public Storage getStorage() {
-						return storage;
-					}
-					
-					@Override	
-					public Map<String,Object> convert(Blob blob){						
-						Map<String,Object> map = new HashMap<>();	
-						map.put("systemInfo",getSystemInfo());
-						map.put("bucket",blob.getBucket());
-						map.put("path",blob.getName());
-						map.put("name",extractFileName(blob.getName()));
-						map.put("size",blob.getSize());
-						map.put("directory",blob.isDirectory());
-						map.put("createTime",blob.getCreateTimeOffsetDateTime() == null?null:blob.getCreateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()).toString());			
-						map.put("updateTime",blob.getUpdateTimeOffsetDateTime() == null?null:blob.getUpdateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()).toString());
-						map.put("contentType",blob.getContentType());
-						map.put("downloadStorePath","");
-						map.put("downloadTime","");				
-						map.put("metadata",blob.getMetadata());
-						
-						return map;
-					}
-					
-					@Override	
-					public List<Map<String,Object>> convert(List<Blob> list){
-						if(list == null) {return null;}
-						List<Map<String,Object>> result = new ArrayList<>();
-						list.forEach(i->result.add(this.convert(i)));
-						return result;
-					}
-					
-					@Override	
-					public boolean isExistBucket(String bucket) {
-						return this.getBucket(bucket) != null;
-					}
-					
-					@Override	
-					public boolean isExistObject(String bucket, String remotePath) {
-						return this.getObject(bucket, remotePath) != null;
-					}
-					
-					@Override
-					public boolean isDirectory(String bucket, String remotePath) {
-						Blob blob = storage.get(BlobId.of(bucket, trimSeparator(remotePath)));				
-			        	//log.debug(">> [isDirectory] blob:{}",blob);
-						if(blob != null && !blob.isDirectory()) {return false;}			        	
-						return true;
-					}
-					
-					@Override	
-				    public Bucket getBucket(String bucket) {
-						return storage.get(bucket);
-					}
-					
-					@Override	
-					public Blob getObject(String bucket, String remotePath) {
-						return storage.get(bucket, remotePath);
-					}
-					
-					@Override
-					public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetRemotePath) {
-						return this.move(sourceBucket, sourceRemotePath, sourceBucket, targetRemotePath);
-					}
-
-					@Override
-					public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetRemotePath, boolean isOverWrite){
-						return this.move(sourceBucket, sourceRemotePath, sourceBucket, targetRemotePath, isOverWrite);
-					}
-					
-					@Override
-					public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetBucket, String targetRemotePath) {
-						return this._move(sourceBucket, sourceRemotePath, targetBucket, targetRemotePath, true, 0);
-					}
-
-					@Override
-					public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetBucket, String targetRemotePath, boolean isOverWrite){						
-						long startTime = System.nanoTime();
-						List<Blob> list = this._move(sourceBucket, sourceRemotePath, targetBucket, targetRemotePath, isOverWrite, 0);
-						log.debug(">> move.time:{}",TimeUnit.NANOSECONDS.toMillis(System.nanoTime()-startTime));
-						return list;						
-					}
-
-					@Override	
-					public List<String> listBuckets(){
-						List<String> list = new ArrayList<>();
-						Page<Bucket> buckets = storage.list();
-						buckets.iterateAll().forEach(bucket->list.add(bucket.getName()));
-					    return list;
-					}
-					
-					@Override
-					public List<Blob> listObjects(String bucket) {
-						return this.listObjects(bucket,"");
-					}
-					
-					@Override
-					public List<Blob> listObjects(String bucket, String remotePath) {
-						remotePath = trimSeparator(remotePath);
-						if(remotePath.indexOf("*")!=-1) {
-							remotePath = remotePath.substring(0,remotePath.indexOf("*"));
-						}else {
-							Blob object = storage.get(BlobId.of(bucket,remotePath));
-							if(object != null && !object.isDirectory()) {
-								List<Blob> result = new ArrayList<>();
-								result.add(object);
-								return result;
-							}
-						}
-						remotePath = trimSeparator(remotePath+"/");
-						
-						Page<Blob> blobs = storage.list(bucket,Storage.BlobListOption.currentDirectory(),Storage.BlobListOption.prefix(remotePath));
-						if(blobs == null) {return null;}
-	
-						List<Blob> result = new ArrayList<>();
-						blobs.iterateAll().forEach(result::add);
-						return result;
-					}
-					
-					@Override
-					public List<Blob> upload(String bucket, String storePath, String remotePath) throws IOException{
-						return this.upload(bucket, storePath, remotePath, UploadType.stream, true);
-					}
-					
-					@Override
-					public List<Blob> upload(String bucket, String storePath, String remotePath, UploadType uploadType) throws IOException{
-						return this.upload(bucket, storePath, remotePath, uploadType, true);
-					}
-					
-					@Override
-					public List<Blob> upload(String bucket, String storePath, String remotePath, boolean isOverWrite) throws IOException{
-						return this.upload(bucket, storePath, remotePath, UploadType.stream, isOverWrite);
-					}
-
-					@Override
-					public List<Blob> upload(String bucket, String storePath, String remotePath, UploadType uploadType, boolean isOverWrite) throws IOException{
-						long startTime = System.nanoTime();
-						List<Blob> list = this._upload(bucket, storePath, remotePath, uploadType, isOverWrite);
-						log.debug(">> upload.time:{}",TimeUnit.NANOSECONDS.toMillis(System.nanoTime()-startTime));
-						return list;
-					}
-					
-					@Override
-					public BucketObjectInfo download(String bucket, String remotePath, OutputStream outputStream) {
-						Blob blob = storage.get(BlobId.of(bucket, trimSeparator(remotePath)));
-					    blob.downloadTo(outputStream);    
-						return this._convert(blob,null,System.currentTimeMillis());
-					}
-					
-					@Override
-					public BucketObjectInfo download(String bucket, String remotePath, File file) {
-						Blob blob = storage.get(BlobId.of(bucket, trimSeparator(remotePath)));
-						String downloadStorePath = file.getPath();			
-						if(file.exists()) {file.delete();}
-					    blob.downloadTo(Paths.get(downloadStorePath));
-						return this._convert(blob,downloadStorePath,System.currentTimeMillis());
-					}
-					
-					@Override
-					public List<BucketObjectInfo> download(String bucket, String remotePath, String storePath) {
-						return this.download(bucket, remotePath, storePath, true);
-					}
-					
-					@Override
-					public List<BucketObjectInfo> download(String bucket, String remotePath, String storePath, boolean isOverWrite) {
-						long startTime = System.nanoTime();
-						List<BucketObjectInfo> list = this._download(bucket, remotePath, storePath, isOverWrite);
-						log.debug(">> download.time:{}",TimeUnit.NANOSECONDS.toMillis(System.nanoTime()-startTime));
-						return list;
-					}
-					
-					@Override
-					public void delete(String bucket, String remotePath) {
-						this._delete(bucket, remotePath);
-					}
-
-					@Override
-					public Blob compose(String bucket, List<String> sources, String targetPath) {
-						return this._compose(bucket, sources, targetPath);
-					}
-					
-					private BucketObjectInfo _convert(Blob blob, String downloadStorePath, long downloadTime) {
-						BucketObjectInfo info = new BucketObjectInfo();				
-						info.setSystemInfo(getSystemInfo());
-						info.setBucket(blob.getBucket());
-						info.setPath(blob.getName());
-						info.setName(extractFileName(blob.getName()));
-						info.setSize(blob.getSize());
-						info.setDirectory(blob.isDirectory());
-						info.setCreateTime(blob.getCreateTimeOffsetDateTime() == null?null:blob.getCreateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()));			
-						info.setUpdateTime(blob.getUpdateTimeOffsetDateTime() == null?null:blob.getUpdateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()));
-						info.setContentType(blob.getContentType());
-						info.setDownloadStorePath(downloadStorePath);
-						info.setDownloadTime(downloadTime);				
-						info.setMetadata(blob.getMetadata());
-						return info;
-					}
-					
-					private void _delete(String bucket, String remotePath) {
-						Blob blob = storage.get(bucket, remotePath);
-						if(blob != null) {
-							storage.delete(bucket, remotePath, Storage.BlobSourceOption.generationMatch(blob.getGeneration()));
-						}else {											
-							Page<Blob> page = storage.list(bucket,Storage.BlobListOption.prefix(remotePath));
-							if(page.iterateAll().iterator().hasNext()) {
-								for(Blob item : page.iterateAll()){
-									if(item.isDirectory()) {continue;}
-									this.delete(bucket,item.getName());
-								}								
-							}
-						}
-						return;
-					}
-										
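-					// Cloud Storage compose accepts at most 32 source objects per request, so larger source lists are
-					// composed in batches of 32 via an intermediate object that is deleted in the finally block.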
-					private Blob _compose(String bucket, List<String> sources, String targetPath) {
-						if(sources == null || sources.isEmpty()) {throw new SystemException("sources is null or empty.");}
-
-						if(sources.size()>32) {
-							String fileName = extractFileName(targetPath);
-							String subPath = targetPath.replace(fileName,"")+System.nanoTime()+fileName;
-							
-							Blob composeBlob = null;
-							Blob subBlob = null;
-							try{
-								List<String> subList1 = sources.subList(0,32);					
-								subBlob = this._compose(bucket, subList1, subPath);
-								
-								List<String> subList2 = sources.subList(32,sources.size());
-								subList2.set(0, subBlob.getName());
-								
-								composeBlob = this._compose(bucket, subList2, targetPath);
-
-							} catch(Exception e) {
-								throw new SystemException(e.getMessage());
-							} finally {
-								if(subBlob != null) {subBlob.delete();}
-							}
-							
-							return composeBlob;							
-						}
-						
-					    Storage.BlobTargetOption precondition;
-					    if (storage.get(bucket, targetPath) == null) {
-					    	precondition = Storage.BlobTargetOption.doesNotExist();
-					    } else {
-					    	precondition = Storage.BlobTargetOption.generationMatch(storage.get(bucket, targetPath).getGeneration());
-					    }
-						
-					    Storage.ComposeRequest composeRequest = Storage.ComposeRequest.newBuilder()
-					                .addSource(sources)
-					                .setTarget(BlobInfo.newBuilder(bucket, targetPath).build())
-					                .setTargetOptions(precondition)
-					                .build();
-					    
-					    return storage.compose(composeRequest);
-					}
-					
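-					// Moves a single object by copy-then-delete, guarded by a doesNotExist/generationMatch precondition on the target.
-					// Folder moves recurse per object; objects copied before a failure are deleted again, and the source objects
-					// are removed only after the whole folder has been copied.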
-					private List<Blob> _move(String sourceBucket, String sourceRemotePath, String targetBucket, String targetRemotePath, boolean isOverWrite, int depth) {
-						sourceRemotePath = trimSeparator(sourceRemotePath);
-						targetRemotePath = trimSeparator(targetRemotePath);
-
-						//check for same bucket and same path
-						if(sourceBucket.equals(targetBucket) && sourceRemotePath.equals(targetRemotePath)) {
-							log.debug(">> [move] same bucket, same path");
-							return null;
-						}
-
-						//when the source remote path is a file
-						Blob blob = storage.get(BlobId.of(sourceBucket, sourceRemotePath));
-						log.debug(">> [move] blob({}): {}",sourceRemotePath,blob != null?"object":"directory");
-
-			        	if(blob != null && !blob.isDirectory()) {
-
-							//when not overwriting, check whether the target already exists
-							if(!isOverWrite && this.isExistObject(targetBucket, targetRemotePath)){
-								throw new SystemException("A file with the same name already exists in bucket ("+targetBucket+") at target remote path ("+targetRemotePath+").");
-							}
-							
-							//if the target remote path ends with "/", treat it as a folder and append the source file name to build the name stored at the remote path
-							//if the target remote path is an existing directory, build the name to store under that directory
-							boolean isEnable=false;
-							if(!isEnable && (targetRemotePath.substring(targetRemotePath.length()-1).equals("/"))) {isEnable=true;}
-							if(!isEnable && (this.isExistObject(targetBucket, targetRemotePath) && this.isDirectory(targetBucket, targetRemotePath))) {isEnable=true;}
-							if(isEnable) {targetRemotePath = trimSeparator(targetRemotePath+"/"+extractFileName(sourceRemotePath));}
-														
-							BlobId source = BlobId.of(sourceBucket, sourceRemotePath);
-							BlobId target = BlobId.of(targetBucket, targetRemotePath);
-							
-							Storage.BlobTargetOption precondition;
-							blob = storage.get(targetBucket, targetRemotePath);
-							if(blob == null){
-								precondition = Storage.BlobTargetOption.doesNotExist();
-							}else{
-								precondition = Storage.BlobTargetOption.generationMatch(blob.getGeneration());
-							}
-	
-						    storage.copy(Storage.CopyRequest.newBuilder().setSource(source).setTarget(target, precondition).build());
-						    blob = storage.get(target);
-						    if(depth == 0) {
-						    	storage.get(source).delete();
-						    }
-
-						    List<Blob> list = new ArrayList<>();
-						    list.add(blob);
-						    
-						    return list;
-			        	}
-			        	
-						//check: a source remote path that is a folder cannot be moved onto a target remote path that is a file
-						if(!this.isDirectory(targetBucket, targetRemotePath)) {
-							throw new SystemException("Cannot move folder ("+sourceRemotePath+") onto file ("+targetRemotePath+").");
-						}
-								        	
-			        	//check whether the source remote path (folder) is empty
-						Page<Blob> sourcePage = storage.list(sourceBucket,Storage.BlobListOption.prefix(sourceRemotePath));
-						if(!sourcePage.iterateAll().iterator().hasNext()) {
-							log.debug(">> [move] sourceRemotePath({}) is empty.",sourceRemotePath);
-							return null;
-						}
-						
-						//move to the target remote path (folder)
-						List<Blob> list = new ArrayList<>();
-						try{
-							if(!isOverWrite && depth == 0) {
-								Page<Blob> targetPage = storage.list(sourceBucket,Storage.BlobListOption.prefix(targetRemotePath));
-								if(targetPage.iterateAll().iterator().hasNext()) {
-									final Set<String> checkSet = new HashSet<>();
-									final String _sPath = sourceRemotePath;
-									final String _tPath = targetRemotePath;
-									targetPage.iterateAll().forEach(t->{
-										//log.debug(">> [move] target name:{}",t.getName());
-										checkSet.add(t.getName().replace(_tPath,_sPath));
-									});				
-									sourcePage.iterateAll().forEach(s->{
-										if(checkSet.contains(s.getName())) {
-											throw new SystemException("Source remote path ("+s.getName()+") and target remote path ("+s.getName().replace(_sPath,_tPath)+") have the same file name.");
-										}
-									});
-								}
-							}
-							
-							for(Blob item : sourcePage.iterateAll()){
-								String _sourceRemotePath = item.getName();
-								String _targetRemotePath = item.getName().replace(sourceRemotePath,targetRemotePath);
-								depth=1;
-								List<Blob> _list = this._move(sourceBucket, _sourceRemotePath, targetBucket, _targetRemotePath, isOverWrite, depth++);
-								list.addAll(_list);
-								depth--;
-							}
-						}catch(Exception e){
-							//on error, delete files that were already moved
-							if(list != null && !list.isEmpty()) {
-								list.forEach(i->{
-									log.debug(">> [move] exception occurred, deleting moved file:{}",i.getName());
-									this.delete(targetBucket,i.getName());
-								});
-							}
-							throw e;
-						}
-						
-						//delete the objects under the source remote path (folder)
-						if(depth==1) {
-							for(Blob item : sourcePage.iterateAll()){
-								if(item.isDirectory()) {continue;}
-								this.delete(sourceBucket,item.getName());
-							}
-						}
-						
-					    return list;
-					}
-									
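-					// Uploads a local file with Storage.createFrom (buffered from a Path or streamed from a FileInputStream),
-					// guarded by a doesNotExist/generationMatch precondition. Directories are walked recursively, and objects
-					// already uploaded are deleted again when an exception occurs.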
-					private List<Blob> _upload(String bucket, String storePath, String remotePath, UploadType uploadType, boolean isOverWrite) throws IOException{
-						if(StringUtils.isBlank(storePath)) {throw new SystemException("storePath("+storePath+") is empty.");}
-						
-						//check that the upload source exists
-						File source = new File(FilenameUtils.normalize(storePath));				
-						if(!source.exists()) {log.debug(">> [upload] storePath({}) is not exist.",storePath); return null;}
-						
-						List<Blob> list = new ArrayList<>();
-
-						//when the upload source is a file
-						if(source.isFile()) {
-							String _remotePath = trimSeparator(remotePath);
-							if(_remotePath.equals("")) {_remotePath=source.getName();}
-
-							//if the remote path ends with "/", treat it as a folder and append the upload file name to build the name stored at the remote path
-							//if the remote path is an existing directory, build the name to store under that directory
-							boolean isEnable=false;
-							if(!isEnable && (_remotePath.substring(_remotePath.length()-1).equals("/"))) {isEnable=true;}
-							if(!isEnable && (this.isExistObject(bucket, _remotePath) && this.isDirectory(bucket, _remotePath))) {isEnable=true;}
-							if(isEnable) {_remotePath = trimSeparator(_remotePath+"/"+source.getName());}
-							
-							//when not overwriting, check whether the object already exists
-							if(!isOverWrite && this.isExistObject(bucket, _remotePath)){
-								throw new SystemException("A file with the same name already exists in bucket ("+bucket+") at remote path ("+_remotePath+").");
-							}
-							
-							//build blobInfo
-							log.debug(">> [upload] remotePath: {}",_remotePath);
-							BlobId blobId = BlobId.of(bucket, _remotePath);
-							BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build();			
-							
-						    //build the precondition checked when createFrom is executed
-						    Storage.BlobWriteOption precondition;
-						    Blob blob = storage.get(bucket, _remotePath);
-						    if(blob == null){
-						    	//the request fails if the object is created before the request runs
-						    	precondition = Storage.BlobWriteOption.doesNotExist();
-						    } else {
-						    	//the request fails if the generation of the existing object changes before the request runs (detects changes from another process or thread)
-						    	precondition = Storage.BlobWriteOption.generationMatch(blob.getGeneration());
-						    }							
-
-							if(uploadType.equals(UploadType.buffer)) {				
-								list.add(storage.createFrom(blobInfo, Paths.get(storePath), precondition));
-							}
-
-							if(uploadType.equals(UploadType.stream)) {					
-								list.add(storage.createFrom(blobInfo, new FileInputStream(new File(storePath)), precondition));
-							}
-						}
-						
-						//when the upload source is a folder
-						if(source.isDirectory()) {
-							
-							if(!this.isDirectory(bucket, remotePath)) {
-								throw new SystemException("Cannot upload folder ("+storePath+") onto file ("+remotePath+").");
-							}
-							
-							String _storePath = trimSeparator("/"+storePath);
-							String _basePath = trimSeparator(remotePath);					
-							String _remotePath = "";
-
-							for(File f : source.listFiles()) {
-								String _sourcePath = trimSeparator("/"+f.getPath());
-								if(f.isFile()) {_remotePath = _basePath + "/" + f.getName();}
-								if(f.isDirectory()) {_remotePath = _basePath + "/" + _sourcePath.replace(_storePath,"");}
-								_remotePath = trimSeparator(_remotePath);
-
-								if(!_remotePath.equals("")) {
-									try{
-										List<Blob> _list = this._upload(bucket, f.getPath(), _remotePath, uploadType, isOverWrite);
-										list.addAll(_list);
-									}catch(Exception e) {
-										if(list != null && !list.isEmpty()) {
-											list.forEach(i->{
-												log.debug(">> [upload] exception occurred, deleting uploaded file:{}",i.getName());
-												delete(bucket,i.getName());
-											});
-										}
-										throw e;
-									}
-								}
-							}
-						}
-
-						return list;
-					}
-						
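-					// Downloads a single object directly to the store path; for a folder, every object under the prefix is written
-					// beneath storePath (creating directories as needed), and files already downloaded are deleted again when an
-					// exception occurs.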
-					private List<BucketObjectInfo> _download(String bucket, String remotePath, String storePath, boolean isOverWrite) {
-
-						//when the remote path to download is a file, download it directly
-						Blob blob = storage.get(BlobId.of(bucket, trimSeparator(remotePath)));
-						log.debug(">> [download] blob({}): {}",remotePath,blob != null?"object":"directory");				
-			        	if(blob != null && !blob.isDirectory()) {
-			        		
-			        		String downloadStorePath = storePath;
-			        		
-			        		//create the directory path, then build the download path
-			        		File target = new File(FilenameUtils.normalize(downloadStorePath));			        		
-			        		if(!target.exists()) {target.mkdirs();target.delete();}			        		
-			        		if(target.isDirectory()) {	        			
-			        			downloadStorePath = downloadStorePath + File.separator + extractFileName(blob.getName());
-			        			downloadStorePath = FilenameUtils.normalize(downloadStorePath);
-			        		}
-
-							//when not overwriting, check whether the file already exists
-							if(!isOverWrite && (new File(FilenameUtils.normalize(downloadStorePath))).exists()) {
-								throw new SystemException("A file already exists at download path ("+downloadStorePath+"). (isOverWrite:"+isOverWrite+")");
-							}
-
-							//perform the download
-						    blob.downloadTo(Paths.get(downloadStorePath));
-						    
-						    List<BucketObjectInfo> list = new ArrayList<>();
-						    list.add(this._convert(blob,downloadStorePath,System.currentTimeMillis()));
-						    
-							return list;    		
-			        	}
-			        	
-			        	//check whether the remote path (folder) to download is empty
-			        	String prefix = trimSeparator(remotePath);
-						Page<Blob> page = storage.list(bucket,Storage.BlobListOption.prefix(prefix));
-						if(!page.iterateAll().iterator().hasNext()) {
-							log.debug(">> [download] remotePath({}) is empty.",remotePath);
-							return null;
-						}
-						
-						//check whether the store path is an existing file
-						File file = new File(FilenameUtils.normalize(storePath));
-						if(file.exists() && file.isFile()) {
-							throw new SystemException("The store path ("+storePath+") is a file. When the remote path ("+remotePath+") is a folder, an existing file path cannot be used as the store path.");
-						}
-						
-						//when the remote path to download is a folder, download its contents
-						List<BucketObjectInfo> list = new ArrayList<>();												
-						try{
-							file.mkdirs();				
-							String baseStorePath = file.getPath();
-														
-					        for(Blob item : page.iterateAll()) {
-					        	if(item.isDirectory()) {continue;}
-					        	
-					        	//names ending with a slash(/) are directories and are skipped
-					        	String appendpath = item.getName().replace(remotePath,"");
-					        	if(appendpath.substring(appendpath.length()-1).equals("/")) {continue;}
-					        	
-					        	//build the download store path
-					        	String downloadStorePath = FilenameUtils.normalize(baseStorePath+"/"+appendpath);
-					        	log.debug(">> [download] downloadStorePath:{}",downloadStorePath);
-					        	
-					        	//create the directories for the download store path
-					        	File _dirPath = new File(FilenameUtils.getFullPath(downloadStorePath));
-					        	_dirPath.mkdirs();
-					        	
-								//when not overwriting, check whether the file already exists
-								if(!isOverWrite && (new File(FilenameUtils.normalize(downloadStorePath))).exists()) {
-									throw new SystemException("A file with the same name already exists at store path ("+downloadStorePath+"). (isOverWrite:"+isOverWrite+")");
-								}
-								
-								//perform the download
-								blob = storage.get(BlobId.of(bucket, item.getName()));
-							    blob.downloadTo(Paths.get(downloadStorePath));
-							    
-							    list.add(this._convert(blob,downloadStorePath,System.currentTimeMillis()));
-					        }
-						
-						}catch(Exception e){
-							//on error, delete files that were already downloaded
-							if(list != null && !list.isEmpty()) {
-								list.forEach(i->{
-									log.debug(">> [download] exception occurred, deleting downloaded file:{}",i.getName());
-									(new File(i.getDownloadStorePath())).delete();
-								});
-							}
-							throw e;
-						}
-						
-				        return list;
-					}
-						
-				};
-			}
-			
-			private String extractFileName(String path) {
-				path = path.trim();
-				String name = path.substring(path.lastIndexOf("\\")+1, path.length());
-				if(path.indexOf("/")>-1) {name = path.substring(path.lastIndexOf("/")+1, path.length());}
-				return name;
-			}
-		    
-		    private String trimSeparator(String path) {
-		    	path = path.trim();
-		    	if(path.indexOf("\\") == 0) {path = path.replace("\\","/");}
-				if(path.indexOf("/") == 0) {path = this.trimSeparator(path.substring(1));}
-				while(path.indexOf("//")>0) {path = path.replace("//","/");}				
-				return path;
-			}	
-		    
-		    private Map<String,String> getSystemInfo() {
-		    	if(this.systemInfo == null) {
-			    	SystemUtil systemUtil = beanAid.getBean(SystemUtil.class);
-			    	this.systemInfo=systemUtil.getSystemInfo();
-		    	}
-		    	return this.systemInfo;
-		    }
-
-		};
-		
-	}
-	
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-}

+ 0 - 156
src/main/java/kr/co/iya/base/support/util/KafkaUtilPack.java

@@ -1,156 +0,0 @@
-package kr.co.iya.base.support.util;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.UUID;
-
-import org.apache.kafka.common.header.Headers;
-import org.springframework.kafka.support.KafkaHeaders;
-import org.springframework.messaging.MessageHeaders;
-
-import com.fasterxml.jackson.core.type.TypeReference;
-
-import kr.co.iya.base.config.KafkaConfig.KafkaProperties;
-import kr.co.iya.base.support.util.JsonUtilPack.JsonUtil;
-import kr.co.iya.base.support.util.ObjectMapperUtilPack.ObjectMapperUtil;
-import kr.co.iya.base.support.util.SystemUtilPack.SystemUtil;
-import kr.co.iya.base.support.util.TimeUtilPack.TimeUtil;
-import lombok.Builder;
-import lombok.Data;
-
-//@Slf4j
-public class KafkaUtilPack {
-	
-	public interface KafkaUtil {
-		
-		public static final String OFFSET = KafkaHeaders.OFFSET;
-		public static final String CONSUMER = KafkaHeaders.CONSUMER;
-		public static final String TIMESTAMP_TYPE = KafkaHeaders.TIMESTAMP_TYPE;
-		public static final String RECEIVED_TOPIC = KafkaHeaders.RECEIVED_TOPIC;
-		public static final String RECEIVED_PARTITION = KafkaHeaders.RECEIVED_PARTITION;
-		public static final String RECEIVED_KEY = KafkaHeaders.RECEIVED_KEY;		
-		public static final String RECEIVED_TIMESTAMP = KafkaHeaders.RECEIVED_TIMESTAMP;
-
-		public static final String KAFKA_PREFIX = "kafka_";
-		public static final String DEFAULT_PREFIX = "x-utcb-";
-		public static final String DEFAULT_HEADER_KEY_UUID = DEFAULT_PREFIX+"uuid";	
-		public static final String DEFAULT_HEADER_KEY_SERVICE_NAME = DEFAULT_PREFIX+"publisherService";
-		public static final String DEFAULT_HEADER_KEY_SERVER_IP = DEFAULT_PREFIX+"publisherServerIp";	
-        
-		public String getBootstrapServers();
-		
-		public Map<String,String> buildHeader();	
-		public Map<String,String> addDefault(Map<String,String> header);		
-		public Map<String,String> convertMap(Headers headers);
-		public Map<String,String> convertMap(MessageHeaders headers);
-		public String extract(Headers headers,String key);
-		public String extract(MessageHeaders headers,String key);
-		public String convertFormatDate(long timestamp);
-
-		public String toJson(Object payload);
-		public <T> T readValue(String payload, Class<T> type);
-		public <T> T readValue(String content, TypeReference<T> valueTypeRef);		
-	}
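-
-	// Illustrative usage sketch (variable names are placeholders, not defined in this file):
-	//   KafkaUtil kafkaUtil = KafkaUtilPack.getKafkaUtil(meta);             // meta: a KafkaMeta built elsewhere
-	//   Map<String,String> headers = kafkaUtil.addDefault(new HashMap<>()); // adds uuid / service name / server ip defaults
-	//   String payload = kafkaUtil.toJson(message);                         // message: any JSON-serializable object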
-
-	@Data @Builder
-	public static class KafkaMeta {
-		private String applicationName;
-		private KafkaProperties properties;
-		private ObjectMapperUtil objectMapperUtil;
-		private JsonUtil jsonUtil;
-		private TimeUtil timeUtil;
-		private SystemUtil systemUtil;
-	}
-	
-	public static KafkaUtil getKafkaUtil(KafkaMeta meta) {
-		
-		return new KafkaUtil() {
-
-			@Override
-			public String getBootstrapServers() {return meta.getProperties().getBootstrapServers();}
-			
-			@Override
-			public Map<String, String> buildHeader() {
-				Map<String,String> map = new HashMap<>();
-				map.put(DEFAULT_HEADER_KEY_UUID, UUID.randomUUID().toString());
-				map.put(DEFAULT_HEADER_KEY_SERVICE_NAME, meta.getApplicationName());
-				map.put(DEFAULT_HEADER_KEY_SERVER_IP, meta.getSystemUtil().getServerIp());				
-				return map;
-			}
-						
-			@Override
-			public Map<String,String> addDefault(Map<String,String> header){
-				if(header==null) {return this.buildHeader();}
-				Map<String,String> map = new HashMap<>(header);	
-				for(String key:map.keySet()) {
-					if(key.equals(DEFAULT_HEADER_KEY_UUID)||key.equals(DEFAULT_HEADER_KEY_SERVICE_NAME)||key.equals(DEFAULT_HEADER_KEY_SERVER_IP)) {continue;}
-					if(key.startsWith(KAFKA_PREFIX) || key.startsWith(DEFAULT_PREFIX)) {throw new RuntimeException("header key can't start with ["+KAFKA_PREFIX+","+DEFAULT_PREFIX+"]");}
-				}			
-				if(!header.containsKey(DEFAULT_HEADER_KEY_UUID)) {map.put(DEFAULT_HEADER_KEY_UUID, UUID.randomUUID().toString());}
-				if(!header.containsKey(DEFAULT_HEADER_KEY_SERVICE_NAME)) {map.put(DEFAULT_HEADER_KEY_SERVICE_NAME, meta.getApplicationName());}
-				if(!header.containsKey(DEFAULT_HEADER_KEY_SERVER_IP)) {map.put(DEFAULT_HEADER_KEY_SERVER_IP, meta.getSystemUtil().getServerIp());}
-				return map;
-			}	
-
-			@Override
-			public Map<String, String> convertMap(Headers headers) {
-				if(headers==null) {return null;}
-                Map<String,String> map = new HashMap<>();                
-                headers.forEach(h->map.put(h.key(),new String(h.value())));
-				return map;
-			}
-			
-			@Override
-			public Map<String,String> convertMap(MessageHeaders headers){
-				if(headers==null) {return null;}
-		        Map<String,String> map = new HashMap<>();
-		        headers.keySet().forEach(key->{
-		        	Object value = headers.get(key);
-		        	map.put(key, key.startsWith(KAFKA_PREFIX)?String.valueOf(value):new String((byte[])value));		        	
-		        });		        
-				return map;				
-			}
-			
-			@Override
-			public String extract(Headers headers,String key) {
-				if(headers==null) {return null;}
-				try{headers.forEach(h->{if(h.key().equals(key)) {throw new RuntimeException(new String((byte[])h.value()));}});}
-				catch(Exception e) {return e.getMessage();}
-				return null;
-			}
-			
-			@Override
-			public String extract(MessageHeaders headers,String key) {
-				boolean isEnable=true;
-				if(isEnable && headers==null) {isEnable=false;}
-				if(isEnable && (key==null || key.equals(""))) {isEnable=false;}	
-				if(isEnable && !headers.containsKey(key)) {isEnable=false;}
-				return isEnable?new String(headers.get(key,byte[].class)):null;
-			}
-
-			@Override 
-			public String convertFormatDate(long timestamp) {return meta.getTimeUtil().getFormatDate(timestamp,"yyyy-MM-dd HH:mm:ss.SSS");}
-
-			@Override
-			public String toJson(Object obj) {
-				return meta.getJsonUtil().toJson(obj);
-			}
-
-			@Override
-			public <T> T readValue(String payload, Class<T> type) {
-				try{return meta.getObjectMapperUtil().getMapper().readValue(payload, type);}
-				catch(Exception e){throw new RuntimeException(e);}
-			}
-
-			@Override
-			public <T> T readValue(String payload,TypeReference<T> valueTypeRef) {
-				try{return meta.getObjectMapperUtil().getMapper().readValue(payload, valueTypeRef);}
-				catch(Exception e){throw new RuntimeException(e);}
-			}
-		};
-	}
-
-	//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-	
-}